New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign inDemoInstall
Socket

pico-engine-core

Package Overview
Dependencies
Maintainers
1
Versions
145
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

pico-engine-core - npm Package Compare versions

Comparing version 0.45.6 to 0.46.0

13

package.json
{
"name": "pico-engine-core",
"version": "0.45.6",
"version": "0.46.0",
"description": "The core javascript api for the pico-engine. (no http, logging, process management etc...)",

@@ -11,5 +11,3 @@ "main": "src/index.js",

"start": "SKIP_LONG_TESTS=true onchange -i src/ ../../test-rulesets/ -- npm run test -s",
"lint": "eslint src/ && echo lint-ok",
"lint-fix": "eslint src/ --fix",
"test": "npm run lint -s && node src/tests.js | faucet"
"test": "standard && node src/tests.js | faucet"
},

@@ -32,7 +30,7 @@ "repository": {

"devDependencies": {
"eslint": "^4.8.0",
"faucet": "0.0.1",
"krl-compiler": "^0.45.3",
"krl-compiler": "^0.46.0",
"memdown": "^1.4.1",
"onchange": "^3.2.1",
"standard": "^11.0.1",
"tape": "^4.6.0"

@@ -44,3 +42,2 @@ },

"bytewise": "^1.1.0",
"co-callback": "^2.0.0",
"comment-regex": "^1.0.0",

@@ -50,3 +47,3 @@ "cuid": "^1.3.8",

"encoding-down": "^3.0.0",
"krl-stdlib": "^0.45.5",
"krl-stdlib": "^0.46.0",
"level-json-coerce-null": "^1.0.1",

@@ -53,0 +50,0 @@ "levelup": "^2.0.0",

@@ -1,63 +0,61 @@

var _ = require("lodash");
var cocb = require("co-callback");
var async = require("async");
var _ = require('lodash')
var toFloat = function(v){
return parseFloat(v) || 0;
};
// Coerces any value to a finite float, falling back to 0 when
// parseFloat yields NaN (e.g. non-numeric strings, undefined).
function toFloat (v) {
  var parsed = parseFloat(v)
  return parsed || 0
}
// Legacy callback-based aggregation (pre-0.46.0 version of this file).
// For each [name, value] pair: feed `value` into the rule's stored
// aggregator window via core.db.updateAggregatorVar, then expose
// fn(window) in the rule's scope under `name`.
// current_state_machine_state drives how the window evolves:
//   "start" -> reset to just the new value
//   "end"   -> sliding window (drop oldest, append newest)
//   other   -> append
// Pairs are processed concurrently; completion/first error is reported
// through `callback` (async.each semantics).
var aggregateWrap = function(core, current_state_machine_state, rule, ctx, value_pairs, fn, callback){
    async.each(value_pairs, function(pair, next){
        var name = pair[0];
        var value = pair[1] === void 0
            ? null//leveldb doesnt support undefined
            : pair[1];
        // updateAggregatorVar applies the updater to the stored list and
        // (presumably after persisting) yields the new list — TODO confirm in db module
        core.db.updateAggregatorVar(ctx.pico_id, rule, name, function(val){
            if(current_state_machine_state === "start"){
                //reset the aggregated values every time the state machine resets
                return [value];
            }else if(current_state_machine_state === "end"){
                //keep a sliding window every time the state machine hits end again i.e. select when repeat ..
                return _.tail(val.concat([value]));
            }
            return val.concat([value]);
        }, function(err, val){
            if(err) return next(err);
            // expose the aggregated result (max/min/sum/avg/push) in the rule scope
            ctx.scope.set(name, fn(val));
            next();
        });
    }, callback);
};
// Promise-based aggregation. For each [name, value] pair: feed `value`
// into the rule's stored aggregator window in the pico db, then expose
// fn(window) in the rule's scope under `name`.
// currentStateMachineState drives how the window evolves:
//   'start' -> reset to just the new value
//   'end'   -> sliding window (drop oldest, append newest)
//   other   -> append
// Returns a Promise that resolves when every pair has been processed.
function aggregateWrap (core, currentStateMachineState, rule, ctx, valuePairs, fn) {
  var tasks = valuePairs.map(function (pair) {
    var name = pair[0]
    // leveldb doesnt support undefined, so store null instead
    var value = pair[1] === void 0 ? null : pair[1]
    function updater (val) {
      if (currentStateMachineState === 'start') {
        // reset the aggregated values every time the state machine resets
        return [value]
      }
      if (currentStateMachineState === 'end') {
        // keep a sliding window every time the state machine hits end again i.e. select when repeat ..
        return _.tail(val.concat([value]))
      }
      return val.concat([value])
    }
    var updated = core.db.updateAggregatorVarYieldable(ctx.pico_id, rule, name, updater)
    return updated.then(function (val) {
      ctx.scope.set(name, fn(val))
    })
  })
  return Promise.all(tasks)
}
var aggregators = {
max: function(values){
return _.max(_.map(values, toFloat));
},
min: function(values){
return _.min(_.map(values, toFloat));
},
sum: function(values){
return _.reduce(_.map(values, toFloat), function(sum, n){
return sum + n;
}, 0);
},
avg: function(values){
var sum = _.reduce(_.map(values, toFloat), function(sum, n){
return sum + n;
}, 0);
return sum / _.size(values);
},
push: function(values){
return values;
max: function (values) {
return _.max(_.map(values, toFloat))
},
min: function (values) {
return _.min(_.map(values, toFloat))
},
sum: function (values) {
return _.reduce(_.map(values, toFloat), function (sum, n) {
return sum + n
}, 0)
},
avg: function (values) {
var sum = _.reduce(_.map(values, toFloat), function (sum, n) {
return sum + n
}, 0)
return sum / _.size(values)
},
push: function (values) {
return values
}
}
module.exports = function (core, currentStateMachineState, rule) {
return function (ctx, aggregator, valuePairs) {
if (!_.has(aggregators, aggregator)) {
throw new Error('Unsupported aggregator: ' + aggregator)
}
};
module.exports = function(core, current_state_machine_state, rule){
return cocb.wrap(function(ctx, aggregator, value_pairs, callback){
if(_.has(aggregators, aggregator)){
aggregateWrap(core, current_state_machine_state, rule, ctx, value_pairs, aggregators[aggregator], callback);
return;
}
throw new Error("Unsupported aggregator: " + aggregator);
});
};
var fn = aggregators[aggregator]
return aggregateWrap(core, currentStateMachineState, rule, ctx, valuePairs, fn)
}
}

@@ -1,120 +0,115 @@

var _ = require("lodash");
var ktypes = require("krl-stdlib/types");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
// True when `str` is not a string, or is empty/whitespace-only.
var isBlank = function (str) {
  if (!ktypes.isString(str)) {
    return true
  }
  return str.trim().length === 0
}
var isBlank = function(str){
return !ktypes.isString(str) || str.trim().length === 0;
};
// Throws when `obj` carries any key outside the `allowed` whitelist;
// `name` labels the offending object in the error message.
var assertOnlyAllowedProperties = function (name, obj, allowed) {
  var unexpected = _.difference(_.keys(obj), allowed)
  if (unexpected.length === 0) {
    return
  }
  throw new Error(name + ' does not support properties: ' + unexpected.join(', '))
}
var assertOnlyAllowedProperties = function(name, obj, allowed){
var extra_props = _.difference(_.keys(obj), allowed);
if(extra_props.length > 0){
throw new Error(name + " does not support properties: " + extra_props.join(", "));
var cleanEventRules = function (rules) {
if (ktypes.isNull(rules)) {
return []
}
if (!_.isArray(rules)) {
throw new Error('`policy.event.<allow|deny>` must be an Array of rules')
}
return _.map(rules, function (ruleOrig) {
if (!ktypes.isMap(ruleOrig)) {
throw new Error('Policy rules must be Maps, not ' + ktypes.typeOf(ruleOrig))
}
};
assertOnlyAllowedProperties('Policy.event rule', ruleOrig, ['domain', 'type'])
var rule = {}
if (!isBlank(ruleOrig.domain)) {
rule.domain = ruleOrig.domain.trim()
}
if (!isBlank(ruleOrig.type)) {
rule.type = ruleOrig.type.trim()
}
return rule
})
}
var cleanQueryRules = function (rules) {
if (ktypes.isNull(rules)) {
return []
}
if (!_.isArray(rules)) {
throw new Error('`policy.query.<allow|deny>` must be an Array of rules')
}
return _.map(rules, function (ruleOrig) {
if (!ktypes.isMap(ruleOrig)) {
throw new Error('Policy rules must be Maps, not ' + ktypes.typeOf(ruleOrig))
}
assertOnlyAllowedProperties('Policy.query rule', ruleOrig, ['rid', 'name'])
var cleanEventRules = function(rules){
if(ktypes.isNull(rules)){
return [];
var rule = {}
if (!isBlank(ruleOrig.rid)) {
rule.rid = ruleOrig.rid.trim()
}
if( ! _.isArray(rules)){
throw new Error("`policy.event.<allow|deny>` must be an Array of rules");
if (!isBlank(ruleOrig.name)) {
rule.name = ruleOrig.name.trim()
}
return _.map(rules, function(rule_orig){
if( ! ktypes.isMap(rule_orig)){
throw new Error("Policy rules must be Maps, not " + ktypes.typeOf(rule_orig));
}
assertOnlyAllowedProperties("Policy.event rule", rule_orig, ["domain", "type"]);
return rule
})
}
var rule = {};
if(!isBlank(rule_orig.domain)){
rule.domain = rule_orig.domain.trim();
}
if(!isBlank(rule_orig.type)){
rule.type = rule_orig.type.trim();
}
return rule;
});
};
var clean = function (policy) {
if (!ktypes.isMap(policy)) {
throw new TypeError('Policy definition should be a Map, but was ' + ktypes.typeOf(policy))
}
if (isBlank(policy.name)) {
throw new Error('missing `policy.name`')
}
var cleanQueryRules = function(rules){
if(ktypes.isNull(rules)){
return [];
}
if( ! _.isArray(rules)){
throw new Error("`policy.query.<allow|deny>` must be an Array of rules");
}
return _.map(rules, function(rule_orig){
if( ! ktypes.isMap(rule_orig)){
throw new Error("Policy rules must be Maps, not " + ktypes.typeOf(rule_orig));
}
assertOnlyAllowedProperties("Policy.query rule", rule_orig, ["rid", "name"]);
assertOnlyAllowedProperties('Policy', policy, ['name', 'event', 'query'])
var rule = {};
if(!isBlank(rule_orig.rid)){
rule.rid = rule_orig.rid.trim();
}
if(!isBlank(rule_orig.name)){
rule.name = rule_orig.name.trim();
}
return rule;
});
};
if (policy.event) {
assertOnlyAllowedProperties('Policy.event', policy.event, ['deny', 'allow'])
}
if (policy.query) {
assertOnlyAllowedProperties('Policy.query', policy.query, ['deny', 'allow'])
}
var clean = function(policy){
if( ! ktypes.isMap(policy)){
throw new TypeError("Policy definition should be a Map, but was " + ktypes.typeOf(policy));
return {
name: policy.name.trim(),
event: {
deny: cleanEventRules(policy.event && policy.event.deny),
allow: cleanEventRules(policy.event && policy.event.allow)
},
query: {
deny: cleanQueryRules(policy.query && policy.query.deny),
allow: cleanQueryRules(policy.query && policy.query.allow)
}
}
}
if(isBlank(policy.name)){
throw new Error("missing `policy.name`");
module.exports = {
clean: clean,
assert: function (policy, type, data) {
if (type !== 'event' && type !== 'query') {
throw new Error("Channel can only assert type's \"event\" and \"query\"")
}
assertOnlyAllowedProperties("Policy", policy, ["name", "event", "query"]);
var matcher = function (rule) {
return _.every(rule, function (val, key) {
return val === data[key]
})
}
if(policy.event){
assertOnlyAllowedProperties("Policy.event", policy.event, ["deny", "allow"]);
if (_.find(policy[type].deny, matcher)) {
throw new Error('Denied by channel policy')
}
if(policy.query){
assertOnlyAllowedProperties("Policy.query", policy.query, ["deny", "allow"]);
if (!_.find(policy[type].allow, matcher)) {
throw new Error('Not allowed by channel policy')
}
return {
name: policy.name.trim(),
event: {
deny : cleanEventRules(policy.event && policy.event.deny),
allow: cleanEventRules(policy.event && policy.event.allow),
},
query: {
deny: cleanQueryRules(policy.query && policy.query.deny),
allow: cleanQueryRules(policy.query && policy.query.allow),
},
};
};
module.exports = {
clean: clean,
assert: function(policy, type, data){
if(type !== "event" && type !== "query"){
throw new Error("Channel can only assert type's \"event\" and \"query\"");
}
var matcher = function(rule){
return _.every(rule, function(val, key){
return val === data[key];
});
};
if(_.find(policy[type].deny, matcher)){
throw new Error("Denied by channel policy");
}
if( ! _.find(policy[type].allow, matcher)){
throw new Error("Not allowed by channel policy");
}
//allowed
},
};
// allowed
}
}

@@ -1,192 +0,190 @@

var test = require("tape");
var ChannelPolicy = require("./ChannelPolicy");
var test = require('tape')
var ChannelPolicy = require('./ChannelPolicy')
test("policy = ChannelPolicy.clean(policy)", function(t){
var cleanIt = ChannelPolicy.clean;
test('policy = ChannelPolicy.clean(policy)', function (t) {
var cleanIt = ChannelPolicy.clean
try{
cleanIt(null);
t.fail("should throw");
}catch(e){
t.equals(e + "", "TypeError: Policy definition should be a Map, but was Null");
}
try{
cleanIt({});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing `policy.name`");
}
try{
cleanIt({name: " "});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing `policy.name`");
}
try {
cleanIt(null)
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'TypeError: Policy definition should be a Map, but was Null')
}
try {
cleanIt({})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing `policy.name`')
}
try {
cleanIt({name: ' '})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing `policy.name`')
}
try {
cleanIt({
name: 'foo',
event: {allow: '*'}
})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: `policy.event.<allow|deny>` must be an Array of rules')
}
try {
cleanIt({
name: 'foo',
query: {allow: 'ALL'}
})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: `policy.query.<allow|deny>` must be an Array of rules')
}
try{
cleanIt({
name: "foo",
event: {allow: "*"},
});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: `policy.event.<allow|deny>` must be an Array of rules");
try {
cleanIt({
name: 'foo',
event: {allow: ['wat']}
})
t.fail('should throw..')
} catch (e) {
t.equals(e + '', 'Error: Policy rules must be Maps, not String')
}
try {
cleanIt({
name: 'foo',
query: {allow: ['wat']}
})
t.fail('should throw..')
} catch (e) {
t.equals(e + '', 'Error: Policy rules must be Maps, not String')
}
try {
cleanIt({
name: 'foo',
bar: {allow: ['wat']},
baz: true
})
t.fail('should throw..')
} catch (e) {
t.equals(e + '', 'Error: Policy does not support properties: bar, baz')
}
try {
cleanIt({
name: 'foo',
event: {allow: [{}], wat: []}
})
t.fail('should throw..')
} catch (e) {
t.equals(e + '', 'Error: Policy.event does not support properties: wat')
}
try {
cleanIt({
name: 'foo',
query: {allow: [{}], wat: []}
})
t.fail('should throw..')
} catch (e) {
t.equals(e + '', 'Error: Policy.query does not support properties: wat')
}
try {
cleanIt({
name: 'foo',
event: {allow: [{wat: 1}]}
})
t.fail('should throw..')
} catch (e) {
t.equals(e + '', 'Error: Policy.event rule does not support properties: wat')
}
try {
cleanIt({
name: 'foo',
query: {allow: [{wat: 1}]}
})
t.fail('should throw..')
} catch (e) {
t.equals(e + '', 'Error: Policy.query rule does not support properties: wat')
}
t.deepEquals(cleanIt({
name: 'foo'
}), {
name: 'foo',
event: {
deny: [],
allow: []
},
query: {
deny: [],
allow: []
}
try{
cleanIt({
name: "foo",
query: {allow: "ALL"},
});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: `policy.query.<allow|deny>` must be an Array of rules");
})
t.deepEquals(cleanIt({
name: 'foo',
event: {allow: [{}]}
}), {
name: 'foo',
event: {
deny: [],
allow: [{}]
},
query: {
deny: [],
allow: []
}
})
try{
cleanIt({
name: "foo",
event: {allow: ["wat"]},
});
t.fail("should throw..");
}catch(e){
t.equals(e + "", "Error: Policy rules must be Maps, not String");
t.deepEquals(cleanIt({
name: ' foo ',
event: {
allow: [
{domain: 'one ', type: 'thrEE'},
{domain: ' fIVe '},
{type: '\tsix '}
]
}
try{
cleanIt({
name: "foo",
query: {allow: ["wat"]},
});
t.fail("should throw..");
}catch(e){
t.equals(e + "", "Error: Policy rules must be Maps, not String");
}), {
name: 'foo',
event: {
deny: [],
allow: [
{domain: 'one', type: 'thrEE'},
{domain: 'fIVe'},
{type: 'six'}
]
},
query: {
deny: [],
allow: []
}
try{
cleanIt({
name: "foo",
bar: {allow: ["wat"]},
baz: true,
});
t.fail("should throw..");
}catch(e){
t.equals(e + "", "Error: Policy does not support properties: bar, baz");
})
t.deepEquals(cleanIt({
name: ' foo ',
query: {
allow: [
{rid: 'one ', name: 'thrEE'},
{rid: ' fIVe '},
{name: '\tsix '}
]
}
try{
cleanIt({
name: "foo",
event: {allow: [{}], wat: []},
});
t.fail("should throw..");
}catch(e){
t.equals(e + "", "Error: Policy.event does not support properties: wat");
}), {
name: 'foo',
event: {
deny: [],
allow: []
},
query: {
deny: [],
allow: [
{rid: 'one', name: 'thrEE'},
{rid: 'fIVe'},
{name: 'six'}
]
}
try{
cleanIt({
name: "foo",
query: {allow: [{}], wat: []},
});
t.fail("should throw..");
}catch(e){
t.equals(e + "", "Error: Policy.query does not support properties: wat");
}
try{
cleanIt({
name: "foo",
event: {allow: [{wat: 1}]},
});
t.fail("should throw..");
}catch(e){
t.equals(e + "", "Error: Policy.event rule does not support properties: wat");
}
try{
cleanIt({
name: "foo",
query: {allow: [{wat: 1}]},
});
t.fail("should throw..");
}catch(e){
t.equals(e + "", "Error: Policy.query rule does not support properties: wat");
}
})
t.deepEquals(cleanIt({
name: "foo",
}), {
name: "foo",
event: {
deny: [],
allow: [],
},
query: {
deny: [],
allow: [],
},
});
t.deepEquals(cleanIt({
name: "foo",
event: {allow: [{}]},
}), {
name: "foo",
event: {
deny: [],
allow: [{}],
},
query: {
deny: [],
allow: [],
},
});
t.deepEquals(cleanIt({
name: " foo ",
event: {
allow: [
{domain: "one ", type: "thrEE"},
{domain: " fIVe "},
{type: "\tsix "},
]
},
}), {
name: "foo",
event: {
deny: [],
allow: [
{domain: "one", type: "thrEE"},
{domain: "fIVe"},
{type: "six"},
],
},
query: {
deny: [],
allow: [],
},
});
t.deepEquals(cleanIt({
name: " foo ",
query: {
allow: [
{rid: "one ", name: "thrEE"},
{rid: " fIVe "},
{name: "\tsix "},
]
},
}), {
name: "foo",
event: {
deny: [],
allow: [],
},
query: {
deny: [],
allow: [
{rid: "one", name: "thrEE"},
{rid: "fIVe"},
{name: "six"},
],
},
});
t.end();
});
t.end()
})

@@ -1,10 +0,10 @@

var _ = require("lodash");
var ktypes = require("krl-stdlib/types");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
var isBlank = function(str){
if(!_.isString(str)){
return true;
}
return str.trim().length === 0;
};
// True when `str` is not a string, or is empty/whitespace-only.
var isBlank = function (str) {
  return !_.isString(str) || str.trim().length === 0
}

@@ -17,42 +17,41 @@ /**

*/
module.exports = function(event_orig){
module.exports = function (eventOrig) {
if (isBlank(eventOrig && eventOrig.eci)) {
throw new Error('missing event.eci')
}
if (isBlank(eventOrig.domain)) {
throw new Error('missing event.domain')
}
if (isBlank(eventOrig.type)) {
throw new Error('missing event.type')
}
if(isBlank(event_orig && event_orig.eci)){
throw new Error("missing event.eci");
var attrs = {}
if (_.has(eventOrig, 'attrs')) {
// we want to make sure only json-able values are in the attrs
// also want to clone it as to not mutate the original copy
var attrsJson = ktypes.encode(eventOrig.attrs)
// only if it's a map or array do we consider it valid
if (attrsJson[0] === '{' || attrsJson[0] === '[') {
attrs = ktypes.decode(attrsJson)
}
if(isBlank(event_orig.domain)){
throw new Error("missing event.domain");
}
if(isBlank(event_orig.type)){
throw new Error("missing event.type");
}
}
var attrs = {};
if(_.has(event_orig, "attrs")){
//we want to make sure only json-able values are in the attrs
//also want to clone it as to not mutate the original copy
var attrs_json = ktypes.encode(event_orig.attrs);
//only if it's a map or array do we consider it valid
if(attrs_json[0] === "{" || attrs_json[0] === "["){
attrs = ktypes.decode(attrs_json);
}
}
var eid = ktypes.toString(eventOrig.eid).trim()
if (eid.length === 0 || eid === 'null') {
eid = 'none'
}
var eid = ktypes.toString(event_orig.eid).trim();
if(eid.length === 0 || eid === "null"){
eid = "none";
}
return {
return {
eci: eventOrig.eci.trim(),
eci: event_orig.eci.trim(),
eid: eid,
eid: eid,
domain: eventOrig.domain.trim(),
type: eventOrig.type.trim(),
domain: event_orig.domain.trim(),
type: event_orig.type.trim(),
attrs: attrs
attrs: attrs,
};
};
}
}

@@ -1,192 +0,187 @@

var test = require("tape");
var cleanEvent = require("./cleanEvent");
var test = require('tape')
var cleanEvent = require('./cleanEvent')
test("event = cleanEvent(event)", function(t){
test('event = cleanEvent(event)', function (t) {
try {
cleanEvent()
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing event.eci')
}
try {
cleanEvent({eci: 0})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing event.eci')
}
try {
cleanEvent({eci: ''})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing event.eci')
}
try {
cleanEvent({eci: ' '})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing event.eci')
}
try {
cleanEvent({eci: 'eci-1', domain: ''})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing event.domain')
}
try {
cleanEvent({eci: 'eci-1', domain: 'foo'})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing event.type')
}
try {
cleanEvent({eci: 'eci-1', domain: 'foo', type: ' '})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing event.type')
}
try{
cleanEvent();
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing event.eci");
}
try{
cleanEvent({eci: 0});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing event.eci");
}
try{
cleanEvent({eci: ""});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing event.eci");
}
try{
cleanEvent({eci: " "});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing event.eci");
}
try{
cleanEvent({eci: "eci-1", domain: ""});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing event.domain");
}
try{
cleanEvent({eci: "eci-1", domain: "foo"});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing event.type");
}
try{
cleanEvent({eci: "eci-1", domain: "foo", type: " "});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing event.type");
}
// bare minimum
t.deepEquals(cleanEvent({
eci: 'eci123',
domain: 'foo',
type: 'bar'
}), {
eci: 'eci123',
eid: 'none',
domain: 'foo',
type: 'bar',
attrs: {}
})
//bare minimum
t.deepEquals(cleanEvent({
eci: "eci123",
domain: "foo",
type: "bar",
}), {
eci: "eci123",
eid: "none",
domain: "foo",
type: "bar",
attrs: {},
});
// attrs - should not be mutable
var attrs = {what: {is: ['this']}}
var event = cleanEvent({
eci: 'eci123',
eid: '555',
domain: 'foo',
type: 'bar',
attrs: attrs
})
t.deepEquals(event, {
eci: 'eci123',
eid: '555',
domain: 'foo',
type: 'bar',
attrs: attrs
})
t.deepEquals(event.attrs, attrs, 'they should match before event.attrs mutates')
event.attrs.what = 'blah'
t.notDeepEqual(event.attrs, attrs, 'oops, attrs was mutable')
// trim up inputs
t.deepEquals(cleanEvent({
eci: ' eci123 ',
eid: ' 3 3 3 3 ',
domain: ' foo\n ',
type: ' \t bar ',
attrs: {' foo ': " don't trim these "}
}), {
eci: 'eci123',
eid: '3 3 3 3',
domain: 'foo',
type: 'bar',
attrs: {' foo ': " don't trim these "}
})
//attrs - should not be mutable
var attrs = {what: {is: ["this"]}};
var event = cleanEvent({
eci: "eci123",
eid: "555",
domain: "foo",
type: "bar",
attrs: attrs
});
t.deepEquals(event, {
eci: "eci123",
eid: "555",
domain: "foo",
type: "bar",
attrs: attrs,
});
t.deepEquals(event.attrs, attrs, "they should match before event.attrs mutates");
event.attrs.what = "blah";
t.notDeepEqual(event.attrs, attrs, "oops, attrs was mutable");
// no timestamp
t.deepEquals(cleanEvent({
eci: 'eci123',
domain: 'foo',
type: 'bar',
timestamp: new Date()
}), {
eci: 'eci123',
eid: 'none',
domain: 'foo',
type: 'bar',
attrs: {}
})
// no for_rid
t.deepEquals(cleanEvent({
eci: 'eci123',
domain: 'foo',
type: 'bar',
for_rid: 'rid'
}), {
eci: 'eci123',
eid: 'none',
domain: 'foo',
type: 'bar',
attrs: {}
})
//trim up inputs
var testAttrs = function (input, output, msg) {
t.deepEquals(cleanEvent({
eci: " eci123 ",
eid: " 3 3 3 3 ",
domain: " foo\n ",
type: " \t bar ",
attrs: {" foo ": " don't trim these "}
}), {
eci: "eci123",
eid: "3 3 3 3",
domain: "foo",
type: "bar",
attrs: {" foo ": " don't trim these "}
});
eci: 'eci123',
eid: 'eid',
domain: 'foo',
type: 'bar',
attrs: input
}).attrs, output, msg)
}
//no timestamp
t.deepEquals(cleanEvent({
eci: "eci123",
domain: "foo",
type: "bar",
timestamp: new Date(),
}), {
eci: "eci123",
eid: "none",
domain: "foo",
type: "bar",
attrs: {},
});
testAttrs({
fn: function () {}
}, {
fn: '[Function]'
}, 'convert attrs via KRL json encode')
testAttrs(function () {}, {}, 'attrs must be a map or array')
//no for_rid
t.deepEquals(cleanEvent({
eci: "eci123",
domain: "foo",
type: "bar",
for_rid: "rid",
}), {
eci: "eci123",
eid: "none",
domain: "foo",
type: "bar",
attrs: {},
});
testAttrs(
[0, 1, 'a', null, void 0, NaN],
[0, 1, 'a', null, null, null],
"attrs normalize to JSON null's"
)
var testAttrs = function(input, output, msg){
t.deepEquals(cleanEvent({
eci: "eci123",
eid: "eid",
domain: "foo",
type: "bar",
attrs: input,
}).attrs, output, msg);
};
testAttrs(
{a: null, b: void 0, c: NaN},
{a: null, b: null, c: null},
"attrs normalize to JSON null's"
);
testAttrs({
fn: function(){}
}, {
fn: "[Function]"
}, "convert attrs via KRL json encode");
testAttrs(function(){}, {}, "attrs must be a map or array");
(function () {
testAttrs(
[0, 1, "a", null, void 0, NaN],
[0, 1, "a", null, null, null],
"attrs normalize to JSON null's"
);
arguments,
{'0': 'foo', '1': 'bar'},
'non "plain" objects should work as Maps'
)
}('foo', 'bar'))
testAttrs(
{a: null, b: void 0, c: NaN},
{a: null, b: null, c: null},
"attrs normalize to JSON null's"
);
var testEid = function (input, output, msg) {
t.deepEquals(cleanEvent({
eci: 'eci123',
eid: input,
domain: 'foo',
type: 'bar'
}).eid, output, msg)
}
(function(){
testAttrs(
arguments,
{"0": "foo", "1": "bar"},
"non \"plain\" objects should work as Maps"
);
}("foo", "bar"));
testEid(' foo ', 'foo')
testEid('', 'none')
testEid(' ', 'none')
testEid(null, 'none')
testEid(NaN, 'none')
testEid(void 0, 'none')
testEid('null', 'none')
testEid([1, 2], '[Array]')
testEid({foo: 'bar'}, '[Map]')
var testEid = function(input, output, msg){
t.deepEquals(cleanEvent({
eci: "eci123",
eid: input,
domain: "foo",
type: "bar",
}).eid, output, msg);
};
testEid(123, '123')
testEid(123.0, '123')
testEid(0.7500, '0.75')
testEid(" foo ", "foo");
testEid("", "none");
testEid(" ", "none");
testEid(null, "none");
testEid(NaN, "none");
testEid(void 0, "none");
testEid("null", "none");
testEid([1, 2], "[Array]");
testEid({foo: "bar"}, "[Map]");
testEid(123, "123");
testEid(123.0, "123");
testEid(.7500, "0.75");
t.end();
});
t.end()
})

@@ -1,10 +0,10 @@

var _ = require("lodash");
var ktypes = require("krl-stdlib/types");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
var isBlank = function(str){
if(!_.isString(str)){
return true;
}
return str.trim().length === 0;
};
// True when `str` is not a string, or is empty/whitespace-only.
var isBlank = function (str) {
  return !_.isString(str) || str.trim().length === 0
}

@@ -17,34 +17,33 @@ /**

*/
module.exports = function(query_orig){
module.exports = function (queryOrig) {
if (isBlank(queryOrig && queryOrig.eci)) {
throw new Error('missing query.eci')
}
if (isBlank(queryOrig.rid)) {
throw new Error('missing query.rid')
}
if (isBlank(queryOrig.name)) {
throw new Error('missing query.name')
}
if(isBlank(query_orig && query_orig.eci)){
throw new Error("missing query.eci");
var args = {}
if (_.has(queryOrig, 'args')) {
// we want to make sure only json-able values are in the args
// also want to clone it as to not mutate the original copy
var attrsJson = ktypes.encode(queryOrig.args)
// only if it's a map or array do we consider it valid
if (attrsJson[0] === '{' || attrsJson[0] === '[') {
args = ktypes.decode(attrsJson)
}
if(isBlank(query_orig.rid)){
throw new Error("missing query.rid");
}
if(isBlank(query_orig.name)){
throw new Error("missing query.name");
}
}
var args = {};
if(_.has(query_orig, "args")){
//we want to make sure only json-able values are in the args
//also want to clone it as to not mutate the original copy
var attrs_json = ktypes.encode(query_orig.args);
//only if it's a map or array do we consider it valid
if(attrs_json[0] === "{" || attrs_json[0] === "["){
args = ktypes.decode(attrs_json);
}
}
return {
eci: queryOrig.eci.trim(),
return {
eci: query_orig.eci.trim(),
rid: queryOrig.rid.trim(),
rid: query_orig.rid.trim(),
name: queryOrig.name.trim(),
name: query_orig.name.trim(),
args: args,
};
};
args: args
}
}

@@ -1,159 +0,155 @@

var test = require("tape");
var cleanQuery = require("./cleanQuery");
var test = require('tape')
var cleanQuery = require('./cleanQuery')
test("query = cleanQuery(query)", function(t){
test('query = cleanQuery(query)', function (t) {
try {
cleanQuery()
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing query.eci')
}
try {
cleanQuery({eci: 0})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing query.eci')
}
try {
cleanQuery({eci: ''})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing query.eci')
}
try {
cleanQuery({eci: ' '})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing query.eci')
}
try {
cleanQuery({eci: 'eci-1', rid: ''})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing query.rid')
}
try {
cleanQuery({eci: 'eci-1', rid: 'foo'})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing query.name')
}
try {
cleanQuery({eci: 'eci-1', rid: 'foo', name: ' '})
t.fail('should throw')
} catch (e) {
t.equals(e + '', 'Error: missing query.name')
}
try{
cleanQuery();
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing query.eci");
}
try{
cleanQuery({eci: 0});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing query.eci");
}
try{
cleanQuery({eci: ""});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing query.eci");
}
try{
cleanQuery({eci: " "});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing query.eci");
}
try{
cleanQuery({eci: "eci-1", rid: ""});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing query.rid");
}
try{
cleanQuery({eci: "eci-1", rid: "foo"});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing query.name");
}
try{
cleanQuery({eci: "eci-1", rid: "foo", name: " "});
t.fail("should throw");
}catch(e){
t.equals(e + "", "Error: missing query.name");
}
// bare minimum
t.deepEquals(cleanQuery({
eci: 'eci123',
rid: 'foo',
name: 'bar'
}), {
eci: 'eci123',
rid: 'foo',
name: 'bar',
args: {}
})
//bare minimum
t.deepEquals(cleanQuery({
eci: "eci123",
rid: "foo",
name: "bar",
}), {
eci: "eci123",
rid: "foo",
name: "bar",
args: {},
});
// args - should not be mutable
var args = {what: {is: ['this']}}
var query = cleanQuery({
eci: 'eci123',
rid: 'foo',
name: 'bar',
args: args
})
t.deepEquals(query, {
eci: 'eci123',
rid: 'foo',
name: 'bar',
args: args
})
t.deepEquals(query.args, args, 'they should match before query.args mutates')
query.args.what = 'blah'
t.notDeepEqual(query.args, args, 'oops, args was mutable')
// trim up inputs
t.deepEquals(cleanQuery({
eci: ' eci123 ',
rid: ' foo\n ',
name: ' \t bar ',
args: {' foo ': " don't trim these "}
}), {
eci: 'eci123',
rid: 'foo',
name: 'bar',
args: {' foo ': " don't trim these "}
})
//args - should not be mutable
var args = {what: {is: ["this"]}};
var query = cleanQuery({
eci: "eci123",
rid: "foo",
name: "bar",
args: args
});
t.deepEquals(query, {
eci: "eci123",
rid: "foo",
name: "bar",
args: args,
});
t.deepEquals(query.args, args, "they should match before query.args mutates");
query.args.what = "blah";
t.notDeepEqual(query.args, args, "oops, args was mutable");
// no timestamp
t.deepEquals(cleanQuery({
eci: 'eci123',
rid: 'foo',
name: 'bar',
timestamp: new Date()
}), {
eci: 'eci123',
rid: 'foo',
name: 'bar',
args: {}
})
// no for_rid
t.deepEquals(cleanQuery({
eci: 'eci123',
rid: 'foo',
name: 'bar',
for_rid: 'rid'
}), {
eci: 'eci123',
rid: 'foo',
name: 'bar',
args: {}
})
//trim up inputs
var testAttrs = function (input, output, msg) {
t.deepEquals(cleanQuery({
eci: " eci123 ",
rid: " foo\n ",
name: " \t bar ",
args: {" foo ": " don't trim these "}
}), {
eci: "eci123",
rid: "foo",
name: "bar",
args: {" foo ": " don't trim these "}
});
eci: 'eci123',
rid: 'foo',
name: 'bar',
args: input
}).args, output, msg)
}
//no timestamp
t.deepEquals(cleanQuery({
eci: "eci123",
rid: "foo",
name: "bar",
timestamp: new Date(),
}), {
eci: "eci123",
rid: "foo",
name: "bar",
args: {},
});
testAttrs({
fn: function () {}
}, {
fn: '[Function]'
}, 'convert args via KRL json encode')
testAttrs(function () {}, {}, 'args must be a map or array')
//no for_rid
t.deepEquals(cleanQuery({
eci: "eci123",
rid: "foo",
name: "bar",
for_rid: "rid",
}), {
eci: "eci123",
rid: "foo",
name: "bar",
args: {},
});
testAttrs(
[0, 1, 'a', null, void 0, NaN],
[0, 1, 'a', null, null, null],
"args normalize to JSON null's"
)
var testAttrs = function(input, output, msg){
t.deepEquals(cleanQuery({
eci: "eci123",
rid: "foo",
name: "bar",
args: input,
}).args, output, msg);
};
testAttrs(
{a: null, b: void 0, c: NaN},
{a: null, b: null, c: null},
"args normalize to JSON null's"
);
testAttrs({
fn: function(){}
}, {
fn: "[Function]"
}, "convert args via KRL json encode");
testAttrs(function(){}, {}, "args must be a map or array");
(function () {
testAttrs(
[0, 1, "a", null, void 0, NaN],
[0, 1, "a", null, null, null],
"args normalize to JSON null's"
);
arguments,
{'0': 'foo', '1': 'bar'},
'non "plain" objects should work as Maps'
)
}('foo', 'bar'))
testAttrs(
{a: null, b: void 0, c: NaN},
{a: null, b: null, c: null},
"args normalize to JSON null's"
);
(function(){
testAttrs(
arguments,
{"0": "foo", "1": "bar"},
"non \"plain\" objects should work as Maps"
);
}("foo", "bar"));
t.end();
});
t.end()
})

@@ -1,1136 +0,1111 @@

var _ = require("lodash");
var bs58 = require("bs58");
var cuid = require("cuid");
var async = require("async");
var crypto = require("crypto");
var encode = require("encoding-down");
var ktypes = require("krl-stdlib/types");
var dbRange = require("./dbRange");
var levelup = require("levelup");
var bytewise = require("bytewise");
var sovrinDID = require("sovrin-did");
var migrations = require("./migrations");
var ChannelPolicy = require("./ChannelPolicy");
var safeJsonCodec = require("level-json-coerce-null");
var extractRulesetID = require("./extractRulesetID");
var _ = require('lodash')
var bs58 = require('bs58')
var cuid = require('cuid')
var async = require('async')
var crypto = require('crypto')
var encode = require('encoding-down')
var ktypes = require('krl-stdlib/types')
var dbRange = require('./dbRange')
var levelup = require('levelup')
var bytewise = require('bytewise')
var sovrinDID = require('sovrin-did')
var migrations = require('./migrations')
var ChannelPolicy = require('./ChannelPolicy')
var safeJsonCodec = require('level-json-coerce-null')
var extractRulesetID = require('./extractRulesetID')
// NOTE: for now we are going to default to an allow all policy
// This makes migrating easier while waiting for krl system rulesets to assume policy usage
var ADMIN_POLICY_ID = 'allow-all-policy'// NOTE: changing this requires a db migration
//NOTE: for now we are going to default to an allow all policy
//This makes migrating easier while waiting for krl system rulesets to assume policy usage
var ADMIN_POLICY_ID = "allow-all-policy";//NOTE: changing this requires a db migration
// Return a shallow copy of `channel` whose `sovrin` object has its
// `secret` key removed, so private key material is never handed to callers.
var omitChannelSecret = function (channel) {
  var publicSovrin = _.omit(channel.sovrin, 'secret')
  var copy = _.assign({}, channel)
  copy.sovrin = publicSovrin
  return copy
}
// Coerce `path` into an array of KRL string key parts for a db lookup.
// A non-array input is treated as a single-element path.
var toKeyPath = function (path) {
  var parts = ktypes.isArray(path) ? path : [path]
  return _.map(parts, function (part) {
    return ktypes.toString(part)
  })
}
// Return a copy of `channel` with `sovrin.secret` stripped out, so the
// channel's private key material is never exposed to callers.
var omitChannelSecret = function(channel){
return _.assign({}, channel, {
sovrin: _.omit(channel.sovrin, "secret"),
});
};
var putPVar = function (ldb, keyPrefix, query, val, callback) {
query = ktypes.isNull(query) ? [] : query
query = ktypes.isArray(query) ? query : [query]
// do this before toKeyPath b/c it will convert all parts to stings
var isArrayIndex = _.isInteger(query[0]) && query[0] >= 0
var path = toKeyPath(query)
if (_.size(path) > 0) {
var subkeyPrefix = keyPrefix.concat(['value', path[0]])
var subPath = _.tail(path)
ldb.get(keyPrefix, function (err, oldRoot) {
if (err && !err.notFound) {
callback(err)
return
}
var ops = []
var type = oldRoot && oldRoot.type
if (type !== 'Map' && type !== 'Array') {
type = isArrayIndex ? 'Array' : 'Map'
}
if (type === 'Array' && !isArrayIndex) {
type = 'Map'// convert to map if a non-index key comes in
}
var root = {
type: type,
// this `value` helps _.set in the toObj db dump set the right type
value: type === 'Array' ? [] : {}
}
ops.push({type: 'put', key: keyPrefix, value: root})
//coerce the value into an array of key strings
var toKeyPath = function(path){
if(!ktypes.isArray(path)){
path = [path];
if (_.isEmpty(subPath)) {
ops.push({type: 'put', key: subkeyPrefix, value: val})
ldb.batch(ops, callback)
return
}
ldb.get(subkeyPrefix, function (err, data) {
if (err && err.notFound) {
data = {}
} else if (err) {
callback(err)
return
}
data = _.set(data, subPath, val)
ops.push({type: 'put', key: subkeyPrefix, value: data})
ldb.batch(ops, callback)
})
})
return
}
ldb.get(keyPrefix, function (err, oldRoot) {
if (err && !err.notFound) {
callback(err)
return
}
return _.map(path, function(key){
return ktypes.toString(key);
});
};
var ops = []
dbRange(ldb, {
prefix: keyPrefix,
values: false
}, function (key) {
ops.push({type: 'del', key: key})
}, function (err) {
if (err) return callback(err)
var root = {
type: ktypes.typeOf(val)
}
switch (root.type) {
case 'Null':
root.value = null
break
case 'Function':
case 'Action':
root.type = 'String'
root.value = ktypes.toString(val)
break
case 'Map':
case 'Array':
_.each(val, function (v, k) {
k = ktypes.toString(k)// represent array i as strings, otherwise bytewise will create separate keys for int and string
ops.push({
type: 'put',
key: keyPrefix.concat(['value', k]),
value: v
})
})
// this `value` helps _.set in the toObj db dump set the right type
root.value = root.type === 'Array' ? [] : {}
break
default:
root.value = val
}
ops.push({
type: 'put',
key: keyPrefix,
value: root
})
ldb.batch(ops, callback)
})
})
}
var putPVar = function(ldb, key_prefix, query, val, callback){
query = ktypes.isNull(query) ? [] : query;
query = ktypes.isArray(query) ? query : [query];
// do this before toKeyPath b/c it will convert all parts to stings
var isArrayIndex = _.isInteger(query[0]) && query[0] >= 0;
var path = toKeyPath(query);
if(_.size(path) > 0){
var subkeyPrefix = key_prefix.concat(["value", path[0]]);
var subPath = _.tail(path);
ldb.get(key_prefix, function(err, oldRoot){
if(err && ! err.notFound){
callback(err);
return;
}
var ops = [];
var type = oldRoot && oldRoot.type;
if(type !== "Map" && type !== "Array"){
type = isArrayIndex ? "Array" : "Map";
}
if(type === "Array" && !isArrayIndex){
type = "Map";// convert to map if a non-index key comes in
}
var root = {
type: type,
// this `value` helps _.set in the toObj db dump set the right type
value: type === "Array" ? [] : {}
};
ops.push({type: "put", key: key_prefix, value: root});
if(_.isEmpty(subPath)){
ops.push({type: "put", key: subkeyPrefix, value: val});
ldb.batch(ops, callback);
return;
}
ldb.get(subkeyPrefix, function(err, data){
if(err && err.notFound){
data = {};
}else if(err){
callback(err);
return;
}
data = _.set(data, subPath, val);
ops.push({type: "put", key: subkeyPrefix, value: data});
ldb.batch(ops, callback);
});
});
return;
var getPVar = function (ldb, keyPrefix, query, callback) {
var path = ktypes.isNull(query) ? [] : toKeyPath(query)
if (_.size(path) > 0) {
var subKey = _.head(path)
var subPath = _.tail(path)
ldb.get(keyPrefix.concat(['value', subKey]), function (err, data) {
if (err && err.notFound) {
return callback()
} else if (err) {
return callback(err)
}
if (!_.isEmpty(subPath)) {
data = _.get(data, subPath)
}
callback(null, data)
})
return
}
ldb.get(keyPrefix, function (err, data) {
if (err && err.notFound) {
return callback()
} else if (err) {
return callback(err)
}
ldb.get(key_prefix, function(err, oldRoot){
if(err && ! err.notFound){
callback(err);
return;
}
var ops = [];
dbRange(ldb, {
prefix: key_prefix,
values: false,
}, function(key){
ops.push({type: "del", key: key});
}, function(err){
if(err) return callback(err);
var root = {
type: ktypes.typeOf(val)
};
switch(root.type){
case "Null":
root.value = null;
break;
case "Function":
case "Action":
root.type = "String";
root.value = ktypes.toString(val);
break;
case "Map":
case "Array":
_.each(val, function(v, k){
ops.push({
type: "put",
key: key_prefix.concat(["value", k]),
value: v,
});
});
// this `value` helps _.set in the toObj db dump set the right type
root.value = root.type === "Array" ? [] : {};
break;
default:
root.value = val;
}
ops.push({
type: "put",
key: key_prefix,
value: root,
});
ldb.batch(ops, callback);
});
});
};
if (data.type !== 'Map' && data.type !== 'Array') {
return callback(null, data.value)
}
var value = data.type === 'Array' ? [] : {}
dbRange(ldb, {
prefix: keyPrefix
}, function (data) {
if (data.key.length === (keyPrefix.length + 2)) {
value[data.key[keyPrefix.length + 1]] = data.value
}
}, function (err) {
callback(err, value)
})
})
}
var getPVar = function(ldb, key_prefix, query, callback){
var path = ktypes.isNull(query) ? [] : toKeyPath(query);
if(_.size(path) > 0){
var sub_key = _.head(path);
var sub_path = _.tail(path);
ldb.get(key_prefix.concat(["value", sub_key]), function(err, data){
if(err && err.notFound){
return callback();
}else if(err){
return callback(err);
}
if(!_.isEmpty(sub_path)){
data = _.get(data, sub_path);
}
callback(null, data);
});
return;
}
ldb.get(key_prefix, function(err, data){
if(err && err.notFound){
return callback();
}else if(err){
return callback(err);
var delPVar = function (ldb, keyPrefix, query, callback) {
var path = ktypes.isNull(query) ? [] : toKeyPath(query)
if (_.size(path) > 0) {
keyPrefix = keyPrefix.concat(['value', _.head(path)])
var subPath = _.tail(path)
if (!_.isEmpty(subPath)) {
ldb.get(keyPrefix, function (err, data) {
if (err && err.notFound) {
data = {}
} else if (err) {
callback(err)
return
}
if(data.type !== "Map" && data.type !== "Array"){
return callback(null, data.value);
var val = _.omit(data, subPath)
if (_.isEmpty(val)) {
ldb.del(keyPrefix, callback)
} else {
ldb.put(keyPrefix, val, callback)
}
var value = data.type === "Array" ? [] : {};
dbRange(ldb, {
prefix: key_prefix,
}, function(data){
if(data.key.length === (key_prefix.length + 2)){
value[data.key[key_prefix.length + 1]] = data.value;
}
}, function(err){
callback(err, value);
});
});
};
})
return
}
}
var dbOps = []
dbRange(ldb, {
prefix: keyPrefix,
values: false
}, function (key) {
dbOps.push({type: 'del', key: key})
}, function (err) {
if (err) return callback(err)
ldb.batch(dbOps, callback)
})
}
module.exports = function (opts) {
var ldb = levelup(encode(opts.db, {
keyEncoding: bytewise,
valueEncoding: safeJsonCodec
}))
var delPVar = function(ldb, key_prefix, query, callback){
var path = ktypes.isNull(query) ? [] : toKeyPath(query);
if(_.size(path) > 0){
key_prefix = key_prefix.concat(["value", _.head(path)]);
var sub_path = _.tail(path);
if( ! _.isEmpty(sub_path)){
ldb.get(key_prefix, function(err, data){
if(err && err.notFound){
data = {};
}else if(err){
callback(err);
return;
}
var val = _.omit(data, sub_path);
if(_.isEmpty(val)){
ldb.del(key_prefix, callback);
}else{
ldb.put(key_prefix, val, callback);
}
});
return;
var newID = cuid
var genDID = sovrinDID.gen
if (opts.__use_sequential_ids_for_testing) {
newID = (function () {
var prefix = opts.__sequential_id_prefix_for_testing || 'id'
var i = 0
return function () {
return prefix + i++
}
}())
genDID = function () {
var id = newID()
return {
did: id,
verifyKey: 'verifyKey_' + id,
secret: {
seed: 'seed_' + id,
signKey: 'signKey_' + id
}
}
}
var db_ops = [];
}
var getMigrationLog = function (callback) {
var log = {}
dbRange(ldb, {
prefix: key_prefix,
values: false,
}, function(key){
db_ops.push({type: "del", key: key});
}, function(err){
ldb.batch(db_ops, callback);
});
};
prefix: ['migration-log']
}, function (data) {
log[data.key[1]] = data.value
}, function (err) {
callback(err, log)
})
}
// Mark db migration `version` as applied by storing an ISO-8601 timestamp
// under the migration-log key (version is coerced to a string key part).
var recordMigration = function (version, callback) {
  var entry = {
    timestamp: (new Date()).toISOString()
  }
  ldb.put(['migration-log', version + ''], entry, callback)
}
// Forget that migration `version` was applied (inverse of recordMigration).
var removeMigration = function (version, callback) {
  ldb.del(['migration-log', version + ''], callback)
}
module.exports = function(opts){
var ldb = levelup(encode(opts.db, {
keyEncoding: bytewise,
valueEncoding: safeJsonCodec
}));
var newID = cuid;
var genDID = sovrinDID.gen;
if(opts.__use_sequential_ids_for_testing){
newID = (function(){
var prefix = opts.__sequential_id_prefix_for_testing || "id";
var i = 0;
return function(){
return prefix + i++;
};
}());
genDID = function(){
var id = newID();
return {
did: id,
verifyKey: "verifyKey_" + id,
secret: {
seed: "seed_" + id,
signKey: "signKey_" + id,
},
};
};
function newChannelBase (opts) {
var did = genDID()
var channel = {
id: did.did,
pico_id: opts.pico_id,
name: opts.name,
type: opts.type,
policy_id: opts.policy_id,
sovrin: did
}
// Read the full migration log: a map of migration version -> record
// (each record holds the timestamp written by recordMigration).
var getMigrationLog = function(callback){
var log = {};
dbRange(ldb, {
prefix: ["migration-log"],
}, function(data){
// data.key = ["migration-log", version]
log[data.key[1]] = data.value;
}, function(err){
callback(err, log);
});
};
// Mark db migration `version` as applied, storing an ISO-8601 timestamp.
var recordMigration = function(version, callback){
ldb.put(["migration-log", version + ""], {
timestamp: (new Date()).toISOString(),
}, callback);
};
// Forget that migration `version` was applied (inverse of recordMigration).
var removeMigration = function(version, callback){
ldb.del(["migration-log", version + ""], callback);
};
var newChannel_base = function(opts){
var did = genDID();
var channel = {
id: did.did,
pico_id: opts.pico_id,
name: opts.name,
type: opts.type,
policy_id: opts.policy_id,
sovrin: did,
};
var db_ops = [
{
type: "put",
key: ["channel", channel.id],
value: channel,
},
{
type: "put",
key: ["pico-eci-list", channel.pico_id, channel.id],
value: true,
}
];
return {
channel: channel,
db_ops: db_ops,
};
};
var dbOps = [
{
type: 'put',
key: ['channel', channel.id],
value: channel
},
{
type: 'put',
key: ['pico-eci-list', channel.pico_id, channel.id],
value: true
}
]
return {
toObj: function(callback){
var dump = {};
dbRange(ldb, {}, function(data){
if(!_.isArray(data.key)){
return;
}
_.set(dump, data.key, data.value);
}, function(err){
callback(err, dump);
});
},
channel: channel,
dbOps: dbOps
}
}
return {
toObj: function (callback) {
var dump = {}
dbRange(ldb, {}, function (data) {
if (!_.isArray(data.key)) {
return
}
_.set(dump, data.key, data.value)
}, function (err) {
callback(err, dump)
})
},
///////////////////////////////////////////////////////////////////////
//
// Picos
//
// Resolve a channel identifier (ECI) to the id of the pico that owns it.
// Yields a NotFoundError with .notFound = true when no such channel exists.
getPicoIDByECI: function(eci, callback){
eci = ktypes.toString(eci);
ldb.get(["channel", eci], function(err, data){
if(err && err.notFound){
err = new levelup.errors.NotFoundError("ECI not found: " + eci);
err.notFound = true;
}
callback(err, data && data.pico_id);
});
},
/// ////////////////////////////////////////////////////////////////////
//
// Picos
//
getPicoIDByECI: function (eci, callback) {
eci = ktypes.toString(eci)
ldb.get(['channel', eci], function (err, data) {
if (err && err.notFound) {
err = new levelup.errors.NotFoundError('ECI not found: ' + eci)
err.notFound = true
}
callback(err, data && data.pico_id)
})
},
assertPicoID: function (id, callback) {
id = ktypes.toString(id)
ldb.get(['pico', id], function (err) {
if (err && err.notFound) {
err = new levelup.errors.NotFoundError('Pico not found: ' + id)
err.notFound = true
}
callback(err, err ? null : id)
})
},
// Verify that a pico with the given id exists. On success yields the
// stringified id; otherwise a NotFoundError (with .notFound = true).
assertPicoID: function(id, callback){
id = ktypes.toString(id);
ldb.get(["pico", id], function(err){
if(err && err.notFound){
err = new levelup.errors.NotFoundError("Pico not found: " + id);
err.notFound = true;
}
callback(err, err ? null : id);
});
},
decryptChannelMessage: function (eci, encryptedMessage, nonce, otherPublicKey, callback) {
eci = ktypes.toString(eci)
encryptedMessage = ktypes.toString(encryptedMessage)
nonce = ktypes.toString(nonce)
otherPublicKey = ktypes.toString(otherPublicKey)
ldb.get(['channel', eci], function (err, channel) {
if (err) {
if (err.notFound) {
err = new levelup.errors.NotFoundError('ECI not found: ' + eci)
err.notFound = true
}
callback(err)
return
}
var decryptedMessage
try {
var sharedSecret = channel.sovrin.sharedSecret
if (!sharedSecret) {
var privateKey = channel.sovrin.secret.encryptionPrivateKey
sharedSecret = sovrinDID.getSharedSecret(otherPublicKey, privateKey)
ldb.put(['channel', eci, 'sovrin', 'secret', 'sharedSecret'], bs58.encode(sharedSecret), function (err) {
if (err) {
callback(err)
}
})
} else {
sharedSecret = bs58.decode(sharedSecret)
}
encryptedMessage = bs58.decode(encryptedMessage)
nonce = bs58.decode(nonce)
decryptedMessage = sovrinDID.decryptMessage(encryptedMessage, nonce, sharedSecret)
if (decryptedMessage === false) throw new Error('failed to decryptedMessage')
} catch (e) {
// Failed to decrypt message
callback(null, false)
return
}
decryptChannelMessage: function(eci, encryptedMessage, nonce, otherPublicKey, callback) {
eci = ktypes.toString(eci);
encryptedMessage = ktypes.toString(encryptedMessage);
nonce = ktypes.toString(nonce);
otherPublicKey = ktypes.toString(otherPublicKey);
ldb.get(["channel", eci], function (err, channel) {
if (err) {
if (err.notFound) {
err = new levelup.errors.NotFoundError("ECI not found: " + eci);
err.notFound = true;
}
callback(err);
return;
}
var decryptedMessage;
try {
var sharedSecret = channel.sovrin.sharedSecret;
if (!sharedSecret) {
var privateKey = channel.sovrin.secret.encryptionPrivateKey;
sharedSecret = sovrinDID.getSharedSecret(otherPublicKey, privateKey);
ldb.put(["channel", eci, "sovrin", "secret", "sharedSecret"], bs58.encode(sharedSecret), function(err){
if (err) {
callback(err);
}
});
} else {
sharedSecret = bs58.decode(sharedSecret);
}
encryptedMessage = bs58.decode(encryptedMessage);
nonce = bs58.decode(nonce);
decryptedMessage = sovrinDID.decryptMessage(encryptedMessage, nonce, sharedSecret);
if(decryptedMessage === false) throw "failed";
} catch(e) {
// Failed to decrypt message
callback(null, false);
return;
}
callback(null, decryptedMessage)
})
},
encryptChannelMessage: function (eci, message, otherPublicKey, callback) {
eci = ktypes.toString(eci)
message = ktypes.toString(message)
otherPublicKey = ktypes.toString(otherPublicKey)
ldb.get(['channel', eci], function (err, channel) {
if (err) {
if (err.notFound) {
err = new levelup.errors.NotFoundError('ECI not found: ' + eci)
err.notFound = true
}
callback(err)
return
}
var privateKey = channel.sovrin.secret.encryptionPrivateKey
privateKey = bs58.decode(privateKey)
var sharedSecret = channel.sovrin.sharedSecret
if (!sharedSecret) {
sharedSecret = sovrinDID.getSharedSecret(otherPublicKey, privateKey)
ldb.put(['channel', eci, 'sovrin', 'secret', 'sharedSecret'], bs58.encode(sharedSecret), function (err) {
if (err) {
callback(err)
}
})
} else {
sharedSecret = bs58.decode(sharedSecret)
}
var nonce = sovrinDID.getNonce()
var encryptedMessage = sovrinDID.encryptMessage(message, nonce, sharedSecret)
callback(null, decryptedMessage);
});
if (encryptedMessage === false) {
callback(new Error('Failed to encrypt message'))
return
}
},
encryptChannelMessage: function(eci, message, otherPublicKey, callback){
eci = ktypes.toString(eci);
message = ktypes.toString(message);
otherPublicKey = ktypes.toString(otherPublicKey);
ldb.get(["channel", eci], function (err, channel){
if(err){
if(err.notFound){
err = new levelup.errors.NotFoundError("ECI not found: " + eci);
err.notFound = true;
}
callback(err);
return;
}
var privateKey = channel.sovrin.secret.encryptionPrivateKey;
privateKey = bs58.decode(privateKey);
var sharedSecret = channel.sovrin.sharedSecret;
if (!sharedSecret) {
sharedSecret = sovrinDID.getSharedSecret(otherPublicKey, privateKey);
ldb.put(["channel", eci, "sovrin", "secret", "sharedSecret"], bs58.encode(sharedSecret), function (err) {
if (err) {
callback(err);
}
});
}
else {
sharedSecret = bs58.decode(sharedSecret);
}
var nonce = sovrinDID.getNonce();
var encryptedMessage = sovrinDID.encryptMessage(message, nonce, sharedSecret);
var returnObj = {}
returnObj.encryptedMessage = bs58.encode(encryptedMessage)
returnObj.nonce = bs58.encode(nonce)
callback(null, returnObj)
})
},
if (encryptedMessage === false) {
callback(new Error("Failed to encrypt message"));
return;
}
// Sign `message` with the channel's Sovrin signing keypair.
// Yields the base58-encoded signed message, a NotFoundError when the ECI
// does not exist, or an Error when sovrin-did refuses to sign.
signChannelMessage: function (eci, message, callback) {
eci = ktypes.toString(eci)
message = ktypes.toString(message)
ldb.get(['channel', eci], function (err, channel) {
if (err) {
if (err.notFound) {
err = new levelup.errors.NotFoundError('ECI not found: ' + eci)
err.notFound = true
}
callback(err)
return
}
// The private signKey lives under channel.sovrin.secret (stripped from
// any channel object returned to callers by omitChannelSecret).
var signKey = channel.sovrin.secret.signKey
var verifyKey = channel.sovrin.verifyKey
var signedMessage = sovrinDID.signMessage(message, signKey, verifyKey)
if (signedMessage === false) {
callback(new Error('Failed to sign message'))
return
}
signedMessage = bs58.encode(signedMessage)
callback(null, signedMessage)
})
},
var returnObj = {};
returnObj.encryptedMessage = bs58.encode(encryptedMessage);
returnObj.nonce = bs58.encode(nonce);
callback(null, returnObj);
});
},
getRootPico: function (callback) {
ldb.get(['root_pico'], callback)
},
signChannelMessage: function(eci, message, callback){
eci = ktypes.toString(eci);
message = ktypes.toString(message);
ldb.get(["channel", eci], function (err, channel){
if(err){
if(err.notFound){
err = new levelup.errors.NotFoundError("ECI not found: " + eci);
err.notFound = true;
}
callback(err);
return;
}
var signKey = channel.sovrin.secret.signKey;
var verifyKey = channel.sovrin.verifyKey;
var signedMessage = sovrinDID.signMessage(message, signKey, verifyKey);
if (signedMessage === false) {
callback(new Error("Failed to sign message"));
return;
}
signedMessage = bs58.encode(signedMessage);
callback(null, signedMessage);
});
},
getParent: function (picoId, callback) {
ldb.get(['pico', picoId], function (err, data) {
callback(err, (data && data.parent_id) || null)
})
},
getAdminECI: function (picoId, callback) {
ldb.get(['pico', picoId], function (err, data) {
callback(err, (data && data.admin_eci) || null)
})
},
listChildren: function (picoId, callback) {
var children = []
dbRange(ldb, {
prefix: ['pico-children', picoId],
values: false
}, function (key) {
children.push(key[2])
}, function (err) {
callback(err, children)
})
},
getRootPico: function(callback){
ldb.get(["root_pico"], callback);
},
newPico: function (opts, callback) {
var newPico = {
id: newID(),
parent_id: _.isString(opts.parent_id) && opts.parent_id.length > 0
? opts.parent_id
: null
}
var c = newChannelBase({
pico_id: newPico.id,
name: 'admin',
type: 'secret',
policy_id: ADMIN_POLICY_ID
})
newPico.admin_eci = c.channel.id
getParent: function(pico_id, callback){
ldb.get(["pico", pico_id], function(err, data){
callback(err, (data && data.parent_id) || null);
});
},
getAdminECI: function(pico_id, callback){
ldb.get(["pico", pico_id], function(err, data){
callback(err, (data && data.admin_eci) || null);
});
},
listChildren: function(pico_id, callback){
var children = [];
dbRange(ldb, {
prefix: ["pico-children", pico_id],
values: false,
}, function(key){
children.push(key[2]);
}, function(err){
callback(err, children);
});
},
var dbOps = c.dbOps
dbOps.push({
type: 'put',
key: ['pico', newPico.id],
value: newPico
})
if (newPico.parent_id) {
dbOps.push({
type: 'put',
key: ['pico-children', newPico.parent_id, newPico.id],
value: true
})
} else {
dbOps.push({
type: 'put',
key: ['root_pico'],
value: newPico
})
}
newPico: function(opts, callback){
ldb.batch(dbOps, function (err) {
if (err) return callback(err)
callback(null, newPico)
})
},
var new_pico = {
id: newID(),
parent_id: _.isString(opts.parent_id) && opts.parent_id.length > 0
? opts.parent_id
: null,
};
removePico: function (id, callback) {
var dbOps = []
var c = newChannel_base({
pico_id: new_pico.id,
name: "admin",
type: "secret",
policy_id: ADMIN_POLICY_ID,
});
new_pico.admin_eci = c.channel.id;
var keyRange = function (prefix, fn) {
return async.apply(dbRange, ldb, {
prefix: prefix,
values: false
}, fn)
}
var db_ops = c.db_ops;
db_ops.push({
type: "put",
key: ["pico", new_pico.id],
value: new_pico,
});
if(new_pico.parent_id){
db_ops.push({
type: "put",
key: ["pico-children", new_pico.parent_id, new_pico.id],
value: true,
});
}else{
db_ops.push({
type: "put",
key: ["root_pico"],
value: new_pico,
});
async.series([
keyRange(['pico', id], function (key) {
dbOps.push({type: 'del', key: key})
}),
keyRange(['pico-eci-list', id], function (key) {
var eci = key[2]
dbOps.push({type: 'del', key: key})
dbOps.push({type: 'del', key: ['channel', eci]})
}),
keyRange(['entvars', id], function (key) {
dbOps.push({type: 'del', key: key})
}),
keyRange(['pico-ruleset', id], function (key) {
dbOps.push({type: 'del', key: key})
dbOps.push({type: 'del', key: ['ruleset-pico', key[2], key[1]]})
}),
keyRange(['pico-children', id], function (key) {
dbOps.push({type: 'del', key: key})
}),
function (next) {
ldb.get(['pico', id], function (err, pico) {
if ((err && err.notFound) || !pico) {
next()
return
}
if (err) return next(err)
if (pico.parent_id) {
keyRange(['pico-children', pico.parent_id, id], function (key) {
dbOps.push({type: 'del', key: key})
})(next)
return
}
ldb.get(['root_pico'], function (err, data) {
if (err) return next(err)
if (data.id === id) {
dbOps.push({type: 'del', key: ['root_pico']})
}
next()
})
})
}
], function (err) {
if (err) return callback(err)
ldb.batch(dbOps, callback)
})
},
ldb.batch(db_ops, function(err){
if(err) return callback(err);
callback(undefined, new_pico);
});
/// /////////////////////////////////////////////////////////////////////
//
// installed rulesets
//
ridsOnPico: function (picoId, callback) {
var picoRids = {}
dbRange(ldb, {
prefix: ['pico-ruleset', picoId]
}, function (data) {
var rid = data.key[2]
if (data.value && data.value.on === true) {
picoRids[rid] = true
}
}, function (err) {
callback(err, picoRids)
})
},
addRulesetToPico: function (picoId, rid, callback) {
var val = {
on: true
}
ldb.batch([
{
type: 'put',
key: ['pico-ruleset', picoId, rid],
value: val
},
{
type: 'put',
key: ['ruleset-pico', rid, picoId],
value: val
}
], callback)
},
removeRulesetFromPico: function (picoId, rid, callback) {
var dbOps = [
{type: 'del', key: ['pico-ruleset', picoId, rid]},
{type: 'del', key: ['ruleset-pico', rid, picoId]}
]
dbRange(ldb, {
prefix: ['entvars', picoId, rid],
values: false
}, function (key) {
dbOps.push({type: 'del', key: key})
}, function (err) {
if (err) return callback(err)
ldb.batch(dbOps, callback)
})
},
/// /////////////////////////////////////////////////////////////////////
//
// channels
//
// Create and persist a new channel for a pico.
// Yields the channel record with its sovrin secret stripped.
newChannel: function (opts, callback) {
var c = newChannelBase(opts)
ldb.batch(c.dbOps, function (err) {
if (err) return callback(err)
callback(null, omitChannelSecret(c.channel))
})
},
// List every channel belonging to `picoId`, secrets stripped.
listChannels: function (picoId, callback) {
var eciList = []
dbRange(ldb, {
prefix: ['pico-eci-list', picoId],
values: false
}, function (key) {
// key = ['pico-eci-list', picoId, eci]
eciList.push(key[2])
}, function (err) {
if (err) return callback(err)
// fetch each channel record in parallel, preserving eciList order
async.map(eciList, function (eci, next) {
ldb.get(['channel', eci], function (err, channel) {
if (err) return next(err)
next(null, omitChannelSecret(channel))
})
}, callback)
})
},
removeChannel: function (eci, callback) {
ldb.get(['channel', eci], function (err, data) {
if (err) return callback(err)
removePico: function(id, callback){
var db_ops = [];
ldb.get(['pico', data.pico_id], function (err, pico) {
if (err) return callback(err)
if (pico.admin_eci === eci) {
callback(new Error("Cannot delete the pico's admin channel"))
return
}
var dbOps = [
{type: 'del', key: ['channel', eci]},
{type: 'del', key: ['pico-eci-list', pico.id, eci]}
]
ldb.batch(dbOps, callback)
})
})
},
var keyRange = function(prefix, fn){
return async.apply(dbRange, ldb, {
prefix: prefix,
values: false,
}, fn);
};
// Fetch a channel (secret stripped) with its policy record attached as
// `.policy`. Yields a NotFoundError (.notFound = true) for unknown ECIs.
getChannelAndPolicy: function (eci, callback) {
ldb.get(['channel', eci], function (err, data) {
if (err) {
if (err.notFound) {
err = new levelup.errors.NotFoundError('ECI not found: ' + ktypes.toString(eci))
err.notFound = true
}
callback(err)
return
}
var chann = omitChannelSecret(data)
// inline the policy the channel references by policy_id
ldb.get(['policy', chann.policy_id], function (err, data) {
if (err) return callback(err)
chann.policy = data
callback(null, chann)
})
})
},
async.series([
keyRange(["pico", id], function(key){
db_ops.push({type: "del", key: key});
}),
keyRange(["pico-eci-list", id], function(key){
var eci = key[2];
db_ops.push({type: "del", key: key});
db_ops.push({type: "del", key: ["channel", eci]});
}),
keyRange(["entvars", id], function(key){
db_ops.push({type: "del", key: key});
}),
keyRange(["pico-ruleset", id], function(key){
db_ops.push({type: "del", key: key});
db_ops.push({type: "del", key: ["ruleset-pico", key[2], key[1]]});
}),
keyRange(["pico-children", id], function(key){
db_ops.push({type: "del", key: key});
}),
function(next){
ldb.get(["pico", id], function(err, pico){
if((err && err.notFound) || !pico){
next();
return;
}
if(err) return next(err);
if(pico.parent_id){
keyRange(["pico-children", pico.parent_id, id], function(key){
db_ops.push({type: "del", key: key});
})(next);
return;
}
ldb.get(["root_pico"], function(err, data){
if(err) return next(err);
if(data.id === id){
db_ops.push({type: "del", key: ["root_pico"]});
}
next();
});
});
},
], function(err){
if(err)return callback(err);
ldb.batch(db_ops, callback);
});
},
newPolicy: function (policy, callback) {
var newPolicy = ChannelPolicy.clean(policy)
newPolicy.id = newID()
ldb.put(['policy', newPolicy.id], newPolicy, function (err, data) {
callback(err, newPolicy)
})
},
listPolicies: function (callback) {
var list = []
dbRange(ldb, {
prefix: ['policy'],
keys: false
}, function (value) {
list.push(value)
}, function (err) {
callback(err, list)
})
},
////////////////////////////////////////////////////////////////////////
//
// installed rulesets
//
ridsOnPico: function(pico_id, callback){
var pico_rids = {};
dbRange(ldb, {
prefix: ["pico-ruleset", pico_id]
}, function(data){
var rid = data.key[2];
if(data.value && data.value.on === true){
pico_rids[rid] = true;
}
}, function(err){
callback(err, pico_rids);
});
},
addRulesetToPico: function(pico_id, rid, callback){
var val = {
on: true,
};
ldb.batch([
{
type: "put",
key: ["pico-ruleset", pico_id, rid],
value: val,
},
{
type: "put",
key: ["ruleset-pico", rid, pico_id],
value: val,
}
], callback);
},
removeRulesetFromPico: function(pico_id, rid, callback){
var db_ops = [
{type: "del", key: ["pico-ruleset", pico_id, rid]},
{type: "del", key: ["ruleset-pico", rid, pico_id]},
];
dbRange(ldb, {
prefix: ["entvars", pico_id, rid],
values: false,
}, function(key){
db_ops.push({type: "del", key: key});
}, function(err){
if(err) return callback(err);
ldb.batch(db_ops, callback);
});
},
assertPolicyID: function (id, callback) {
id = ktypes.toString(id)
ldb.get(['policy', id], function (err) {
if (err && err.notFound) {
err = new levelup.errors.NotFoundError('Policy not found: ' + id)
err.notFound = true
}
callback(err, err ? null : id)
})
},
removePolicy: function (id, callback) {
id = ktypes.toString(id)
ldb.get(['policy', id], function (err, policy) {
if (err && err.notFound) {
err = new levelup.errors.NotFoundError('Policy not found: ' + id)
err.notFound = true
}
if (err) return callback(err)
////////////////////////////////////////////////////////////////////////
//
// channels
//
newChannel: function(opts, callback){
var c = newChannel_base(opts);
ldb.batch(c.db_ops, function(err){
if(err) return callback(err);
callback(null, omitChannelSecret(c.channel));
});
},
listChannels: function(pico_id, callback){
var eci_list = [];
dbRange(ldb, {
prefix: ["pico-eci-list", pico_id],
values: false,
}, function(key){
eci_list.push(key[2]);
}, function(err){
if(err) return callback(err);
async.map(eci_list, function(eci, next){
ldb.get(["channel", eci], function(err, channel){
if(err) return next(err);
next(null, omitChannelSecret(channel));
});
}, callback);
});
},
removeChannel: function(eci, callback){
ldb.get(["channel", eci], function(err, data){
if(err) return callback(err);
var isUsed = false
dbRange(ldb, {
prefix: ['channel'],
keys: false
}, function (chann, stopRange) {
if (chann.policy_id === id) {
isUsed = true
stopRange()
}
}, function (err) {
if (err) return callback(err)
if (isUsed) {
return callback(new Error('Policy ' + id + ' is in use, cannot remove.'))
}
ldb.del(['policy', id], callback)
})
})
},
ldb.get(["pico", data.pico_id], function(err, pico){
if(err) return callback(err);
if(pico.admin_eci === eci){
callback(new Error("Cannot delete the pico's admin channel"));
return;
}
var db_ops = [
{type: "del", key: ["channel", eci]},
{type: "del", key: ["pico-eci-list", pico.id, eci]}
];
ldb.batch(db_ops, callback);
});
});
},
/// /////////////////////////////////////////////////////////////////////
//
// ent:*
//
// Write an entity (ent:*) variable, scoped to pico + ruleset.
// `query` addresses a sub-path within the variable (see putPVar).
putEntVar: function (picoId, rid, varName, query, val, callback) {
putPVar(ldb, ['entvars', picoId, rid, varName], query, val, callback)
},
// Read an entity variable (or a sub-path of it).
getEntVar: function (picoId, rid, varName, query, callback) {
getPVar(ldb, ['entvars', picoId, rid, varName], query, callback)
},
// Delete an entity variable (or a sub-path of it).
delEntVar: function (picoId, rid, varName, query, callback) {
delPVar(ldb, ['entvars', picoId, rid, varName], query, callback)
},
getChannelAndPolicy: function(eci, callback){
ldb.get(["channel", eci], function(err, data){
if(err){
if(err.notFound){
err = new levelup.errors.NotFoundError("ECI not found: " + ktypes.toString(eci));
err.notFound = true;
}
callback(err);
return;
}
var chann = omitChannelSecret(data);
ldb.get(["policy", chann.policy_id], function(err, data){
if(err) return callback(err);
chann.policy = data;
callback(null, chann);
});
});
},
/// /////////////////////////////////////////////////////////////////////
//
// app:*
//
putAppVar: function (rid, varName, query, val, callback) {
putPVar(ldb, ['appvars', rid, varName], query, val, callback)
},
getAppVar: function (rid, varName, query, callback) {
getPVar(ldb, ['appvars', rid, varName], query, callback)
},
delAppVar: function (rid, varName, query, callback) {
delPVar(ldb, ['appvars', rid, varName], query, callback)
},
newPolicy: function(policy, callback){
var new_policy = ChannelPolicy.clean(policy);
new_policy.id = newID();
ldb.put(["policy", new_policy.id], new_policy, function(err, data){
callback(err, new_policy);
});
},
/// /////////////////////////////////////////////////////////////////////
//
// event state machine and aggregators
//
getStateMachine: function (picoId, rule, callback) {
var key = ['state_machine', picoId, rule.rid, rule.name]
ldb.get(key, function (err, data) {
if (err && !err.notFound) {
return callback(err)
}
callback(null, _.has(rule.select.state_machine, data && data.state)
? data
: {state: 'start'})
})
},
putStateMachine: function (picoId, rule, data, callback) {
var key = ['state_machine', picoId, rule.rid, rule.name]
ldb.put(key, data, callback)
},
listPolicies: function(callback){
var list = [];
dbRange(ldb, {
prefix: ["policy"],
keys: false,
}, function(value){
list.push(value);
}, function(err){
callback(err, list);
});
},
updateAggregatorVar: function (picoId, rule, varKey, updater, callback) {
var key = [
'aggregator_var',
picoId,
rule.rid,
rule.name,
varKey
]
ldb.get(key, function (err, val) {
if (err && !err.notFound) {
return callback(err)
}
if (!_.isArray(val)) {
val = []
}
val = updater(val)
if (!_.isArray(val)) {
val = []
}
ldb.put(key, val, function (err) {
callback(err, val)
})
})
},
assertPolicyID: function(id, callback){
id = ktypes.toString(id);
ldb.get(["policy", id], function(err){
if(err && err.notFound){
err = new levelup.errors.NotFoundError("Policy not found: " + id);
err.notFound = true;
}
callback(err, err ? null : id);
});
},
/// /////////////////////////////////////////////////////////////////////
//
// rulesets
//
storeRuleset: function (krlSrc, meta, callback) {
var timestamp = (new Date()).toISOString()
if (arguments.length === 4 && _.isString(arguments[3])) { // for testing only
timestamp = arguments[3]// for testing only
}// for testing only
removePolicy: function(id, callback){
id = ktypes.toString(id);
ldb.get(["policy", id], function(err, policy){
if(err && err.notFound){
err = new levelup.errors.NotFoundError("Policy not found: " + id);
err.notFound = true;
}
if(err) return callback(err);
var rid = extractRulesetID(krlSrc)
if (!rid) {
callback(new Error('Ruleset name not found'))
return
}
var shasum = crypto.createHash('sha256')
shasum.update(krlSrc)
var hash = shasum.digest('hex')
var is_used = false;
dbRange(ldb, {
prefix: ["channel"],
keys: false,
}, function(chann, stopRange){
if(chann.policy_id === id){
is_used = true;
stopRange();
return;
}
}, function(err){
if(err) return callback(err);
if(is_used){
return callback(new Error("Policy " + id + " is in use, cannot remove."));
}
ldb.del(["policy", id], callback);
});
});
},
var url = _.has(meta, 'url') && _.isString(meta.url)
? meta.url
: null
////////////////////////////////////////////////////////////////////////
//
// ent:*
//
putEntVar: function(pico_id, rid, var_name, query, val, callback){
putPVar(ldb, ["entvars", pico_id, rid, var_name], query, val, callback);
var dbOps = [
{
// the source of truth for a ruleset version
type: 'put',
key: ['rulesets', 'krl', hash],
value: {
src: krlSrc,
rid: rid,
url: url,
timestamp: timestamp
}
},
getEntVar: function(pico_id, rid, var_name, query, callback){
getPVar(ldb, ["entvars", pico_id, rid, var_name], query, callback);
},
delEntVar: function(pico_id, rid, var_name, query, callback){
delPVar(ldb, ["entvars", pico_id, rid, var_name], query, callback);
},
{
// index to view all the versions of a given ruleset name
type: 'put',
key: ['rulesets', 'versions', rid, timestamp, hash],
value: true
}
]
if (url) {
// index to lookup by url
dbOps.push({
type: 'put',
key: ['rulesets', 'url', url.toLowerCase().trim(), rid, hash],
value: true
})
}
ldb.batch(dbOps, function (err) {
if (err) return callback(err)
callback(null, {
rid: rid,
hash: hash
})
})
},
hasEnabledRid: function (rid, callback) {
var hasFound = false
dbRange(ldb, {
prefix: ['rulesets', 'enabled', rid],
values: false,
limit: 1
}, function (key) {
hasFound = true
}, function (err) {
callback(err, hasFound)
})
},
findRulesetsByURL: function (url, callback) {
var r = []
dbRange(ldb, {
prefix: ['rulesets', 'url', url.toLowerCase().trim()]
}, function (data) {
if (data.value) {
r.push({
rid: data.key[3],
hash: data.key[4]
})
}
}, function (err) {
if (err) return callback(err)
callback(null, r)
})
},
enableRuleset: function (hash, callback) {
ldb.get(['rulesets', 'krl', hash], function (err, data) {
if (err) return callback(err)
ldb.put(['rulesets', 'enabled', data.rid], {
hash: hash,
timestamp: (new Date()).toISOString()
}, callback)
})
},
disableRuleset: function (rid, callback) {
ldb.del(['rulesets', 'enabled', rid], callback)
},
getEnabledRuleset: function (rid, callback) {
ldb.get(['rulesets', 'enabled', rid], function (err, dataE) {
if (err) return callback(err)
ldb.get(['rulesets', 'krl', dataE.hash], function (err, dataK) {
if (err) return callback(err)
callback(null, {
src: dataK.src,
hash: dataE.hash,
rid: dataK.rid,
url: dataK.url,
timestamp_stored: dataK.timestamp,
timestamp_enable: dataE.timestamp
})
})
})
},
listAllEnabledRIDs: function (callback) {
var rids = []
dbRange(ldb, {
prefix: ['rulesets', 'enabled'],
values: false
}, function (key) {
rids.push(key[2])
}, function (err) {
callback(err, rids)
})
},
isRulesetUsed: function (rid, callback) {
var isUsed = false
dbRange(ldb, {
prefix: ['ruleset-pico', rid],
values: false,
limit: 1
}, function (key) {
isUsed = true
}, function (err) {
callback(err, isUsed)
})
},
deleteRuleset: function (rid, callback) {
var toDel = [
['rulesets', 'enabled', rid]
]
////////////////////////////////////////////////////////////////////////
//
// app:*
//
putAppVar: function(rid, var_name, query, val, callback){
putPVar(ldb, ["appvars", rid, var_name], query, val, callback);
},
getAppVar: function(rid, var_name, query, callback){
getPVar(ldb, ["appvars", rid, var_name], query, callback);
},
delAppVar: function(rid, var_name, query, callback){
delPVar(ldb, ["appvars", rid, var_name], query, callback);
},
var hashes = []
dbRange(ldb, {
prefix: ['rulesets', 'versions', rid],
values: false
}, function (key) {
var hash = key[4]
////////////////////////////////////////////////////////////////////////
//
// event state machine and aggregators
//
getStateMachine: function(pico_id, rule, callback){
var key = ["state_machine", pico_id, rule.rid, rule.name];
ldb.get(key, function(err, data){
if(err && !err.notFound){
return callback(err);
}
callback(undefined, _.has(rule.select.state_machine, data && data.state)
? data
: {state: "start"});
});
},
putStateMachine: function(pico_id, rule, data, callback){
var key = ["state_machine", pico_id, rule.rid, rule.name];
ldb.put(key, data, callback);
},
updateAggregatorVar: function(pico_id, rule, var_key, updater, callback){
var key = [
"aggregator_var",
pico_id,
rule.rid,
rule.name,
var_key
];
ldb.get(key, function(err, val){
if(err && !err.notFound){
return callback(err);
}
if(!_.isArray(val)){
val = [];
}
val = updater(val);
if(!_.isArray(val)){
val = [];
}
ldb.put(key, val, function(err){
callback(err, val);
});
});
},
////////////////////////////////////////////////////////////////////////
//
// rulesets
//
storeRuleset: function(krl_src, meta, callback){
var timestamp = (new Date()).toISOString();
if(arguments.length === 4 && _.isString(arguments[3])){//for testing only
timestamp = arguments[3];//for testing only
}//for testing only
var rid = extractRulesetID(krl_src);
if(!rid){
callback(new Error("Ruleset name not found"));
return;
toDel.push(key)
toDel.push(['rulesets', 'krl', hash])
hashes.push(hash)
}, function (err) {
if (err) return callback(err)
async.each(hashes, function (hash, next) {
ldb.get(['rulesets', 'krl', hash], function (err, data) {
if (err) return next(err)
if (_.isString(data.url)) {
toDel.push([
'rulesets',
'url',
data.url.toLowerCase().trim(),
data.rid,
hash
])
}
var shasum = crypto.createHash("sha256");
shasum.update(krl_src);
var hash = shasum.digest("hex");
next()
})
}, function (err) {
if (err) return callback(err)
var url = _.has(meta, "url") && _.isString(meta.url)
? meta.url
: null;
dbRange(ldb, {
prefix: ['appvars', rid],
values: false
}, function (key) {
toDel.push(key)
}, function (err) {
if (err) return callback(err)
var db_ops = [
{
//the source of truth for a ruleset version
type: "put",
key: ["rulesets", "krl", hash],
value: {
src: krl_src,
rid: rid,
url: url,
timestamp: timestamp
}
},
{
//index to view all the versions of a given ruleset name
type: "put",
key: ["rulesets", "versions", rid, timestamp, hash],
value: true
}
];
if(url){
//index to lookup by url
db_ops.push({
type: "put",
key: ["rulesets", "url", url.toLowerCase().trim(), rid, hash],
value: true
});
}
ldb.batch(db_ops, function(err){
if(err) return callback(err);
callback(undefined, {
rid: rid,
hash: hash,
});
});
},
hasEnabledRid: function(rid, callback){
var has_found = undefined;
dbRange(ldb, {
prefix: ["rulesets", "enabled", rid],
values: false,
limit: 1
}, function(key){
has_found = true;
}, function(err){
callback(err, has_found);
});
},
findRulesetsByURL: function(url, callback){
var r = [];
dbRange(ldb, {
prefix: ["rulesets", "url", url.toLowerCase().trim()],
}, function(data){
if(data.value){
r.push({
rid: data.key[3],
hash: data.key[4],
});
}
}, function(err){
if(err)return callback(err);
callback(null, r);
});
},
enableRuleset: function(hash, callback){
ldb.get(["rulesets", "krl", hash], function(err, data){
if(err) return callback(err);
ldb.put(["rulesets", "enabled", data.rid], {
hash: hash,
timestamp: (new Date()).toISOString()
}, callback);
});
},
disableRuleset: function(rid, callback){
ldb.del(["rulesets", "enabled", rid], callback);
},
getEnabledRuleset: function(rid, callback){
ldb.get(["rulesets", "enabled", rid], function(err, data_e){
if(err) return callback(err);
ldb.get(["rulesets", "krl", data_e.hash], function(err, data_k){
if(err) return callback(err);
callback(undefined, {
src: data_k.src,
hash: data_e.hash,
rid: data_k.rid,
url: data_k.url,
timestamp_stored: data_k.timestamp,
timestamp_enable: data_e.timestamp
});
});
});
},
listAllEnabledRIDs: function(callback){
var rids = [];
dbRange(ldb, {
prefix: ["rulesets", "enabled"],
values: false
}, function(key){
rids.push(key[2]);
}, function(err){
callback(err, rids);
});
},
isRulesetUsed: function(rid, callback){
var is_used = false;
dbRange(ldb, {
prefix: ["ruleset-pico", rid],
values: false,
limit: 1,
}, function(key){
is_used = true;
}, function(err){
callback(err, is_used);
});
},
deleteRuleset: function(rid, callback){
var to_del = [
["rulesets", "enabled", rid],
];
ldb.batch(_.map(toDel, function (key) {
return {type: 'del', key: key}
}), callback)
})
})
})
},
var hashes = [];
dbRange(ldb, {
prefix: ["rulesets", "versions", rid],
values: false
}, function(key){
var hash = key[4];
/// /////////////////////////////////////////////////////////////////////
//
// scheduled events
//
scheduleEventAt: function (at, event, callback) {
var id = newID()
to_del.push(key);
to_del.push(["rulesets", "krl", hash]);
hashes.push(hash);
}, function(err){
if(err) return callback(err);
async.each(hashes, function(hash, next){
ldb.get(["rulesets", "krl", hash], function(err, data){
if(err) return next(err);
if(_.isString(data.url)){
to_del.push([
"rulesets",
"url",
data.url.toLowerCase().trim(),
data.rid,
hash
]);
}
next();
});
}, function(err){
if(err) return callback(err);
var val = {
id: id,
at: at,
event: event
}
dbRange(ldb, {
prefix: ["appvars", rid],
values: false
}, function(key){
to_del.push(key);
}, function(err){
if(err) return callback(err);
ldb.batch([
{type: 'put', key: ['scheduled', id], value: val},
{type: 'put', key: ['scheduled_by_at', at, id], value: val}
], function (err) {
if (err) return callback(err)
ldb.batch(_.map(to_del, function(key){
return {type: "del", key: key};
}), callback);
});
});
});
},
callback(null, val)
})
},
nextScheduleEventAt: function (callback) {
var r
dbRange(ldb, {
prefix: ['scheduled_by_at'],
limit: 1// peek the first one
}, function (data) {
r = {
id: data.value.id,
at: data.key[1], // Date object
event: data.value.event
}
}, function (err) {
callback(err, r)
})
},
removeScheduleEventAt: function (id, at, callback) {
ldb.batch([
{type: 'del', key: ['scheduled', id]},
{type: 'del', key: ['scheduled_by_at', at, id]}
], callback)
},
scheduleEventRepeat: function (timespec, event, callback) {
var id = newID()
var val = {
id: id,
timespec: timespec,
event: event
}
////////////////////////////////////////////////////////////////////////
//
// scheduled events
//
scheduleEventAt: function(at, event, callback){
var id = newID();
ldb.batch([
{type: 'put', key: ['scheduled', id], value: val}
], function (err) {
if (err) return callback(err)
var val = {
id: id,
at: at,
event: event
};
callback(null, val)
})
},
listScheduled: function (callback) {
var r = []
dbRange(ldb, {
prefix: ['scheduled']
}, function (data) {
var val = data.value
r.push(val)
}, function (err) {
callback(err, _.sortBy(r, 'at'))
})
},
removeScheduled: function (id, callback) {
ldb.get(['scheduled', id], function (err, info) {
if (err) return callback(err)
ldb.batch([
{type: "put", key: ["scheduled", id], value: val},
{type: "put", key: ["scheduled_by_at", at, id], value: val},
], function(err){
if(err) return callback(err);
var dbOps = [
{type: 'del', key: ['scheduled', id]}
]
if (_.has(info, 'at')) {
// also remove the `at` index
dbOps.push({type: 'del', key: ['scheduled_by_at', new Date(info.at), id]})
}
callback(null, val);
});
},
nextScheduleEventAt: function(callback){
var r;
dbRange(ldb, {
prefix: ["scheduled_by_at"],
limit: 1,//peek the first one
}, function(data){
r = {
id: data.value.id,
at: data.key[1],//Date object
event: data.value.event,
};
}, function(err){
callback(err, r);
});
},
removeScheduleEventAt: function(id, at, callback){
ldb.batch([
{type: "del", key: ["scheduled", id]},
{type: "del", key: ["scheduled_by_at", at, id]},
], callback);
},
scheduleEventRepeat: function(timespec, event, callback){
var id = newID();
ldb.batch(dbOps, callback)
})
},
var val = {
id: id,
timespec: timespec,
event: event
};
/// /////////////////////////////////////////////////////////////////////
//
// db migrations
//
getMigrationLog: getMigrationLog,
recordMigration: recordMigration,
removeMigration: removeMigration,
checkAndRunMigrations: function (callback) {
getMigrationLog(function (err, log) {
if (err) return callback(err)
ldb.batch([
{type: "put", key: ["scheduled", id], value: val},
], function(err){
if(err) return callback(err);
var toRun = []
_.each(migrations, function (m, version) {
if (!_.has(log, version)) {
toRun.push(version)
}
})
toRun.sort()// must run in order
callback(null, val);
});
},
listScheduled: function(callback){
var r = [];
dbRange(ldb, {
prefix: ["scheduled"],
}, function(data){
var val = data.value;
r.push(val);
}, function(err){
callback(err, _.sortBy(r, "at"));
});
},
removeScheduled: function(id, callback){
ldb.get(["scheduled", id], function(err, info){
if(err) return callback(err);
async.eachSeries(toRun, function (version, next) {
var m = migrations[version]
m.up(ldb, function (err, data) {
if (err) return next(err)
recordMigration(version, next)
})
}, callback)
})
}
}
}
var db_ops = [
{type: "del", key: ["scheduled", id]},
];
if(_.has(info, "at")){
//also remove the `at` index
db_ops.push({type: "del", key: ["scheduled_by_at", new Date(info.at), id]});
}
ldb.batch(db_ops, callback);
});
},
////////////////////////////////////////////////////////////////////////
//
// db migrations
//
getMigrationLog: getMigrationLog,
recordMigration: recordMigration,
removeMigration: removeMigration,
checkAndRunMigrations: function(callback){
getMigrationLog(function(err, log){
if(err) return callback(err);
var to_run = [];
_.each(migrations, function(m, version){
if( ! _.has(log, version)){
to_run.push(version);
}
});
to_run.sort();//must run in order
async.eachSeries(to_run, function(version, next){
var m = migrations[version];
m.up(ldb, function(err, data){
if(err) return next(err);
recordMigration(version, next);
});
}, callback);
});
},
};
};
module.exports.ADMIN_POLICY_ID = ADMIN_POLICY_ID;
module.exports.ADMIN_POLICY_ID = ADMIN_POLICY_ID

@@ -1,994 +0,975 @@

var _ = require("lodash");
var DB = require("./DB");
var cocb = require("co-callback");
var cuid = require("cuid");
var test = require("tape");
var async = require("async");
var ktypes = require("krl-stdlib/types");
var memdown = require("memdown");
var migrations = require("./migrations");
var ADMIN_POLICY_ID = require("./DB").ADMIN_POLICY_ID;
var _ = require('lodash')
var DB = require('./DB')
var util = require('util')
var cuid = require('cuid')
var test = require('tape')
var async = require('async')
var testA = require('../test/helpers/testA')
var ktypes = require('krl-stdlib/types')
var memdown = require('memdown')
var migrations = require('./migrations')
var ADMIN_POLICY_ID = require('./DB').ADMIN_POLICY_ID
// Build a fresh in-memory DB for tests (deterministic sequential ids).
// Every callback-style method also gets a promisified `<name>Yieldable`
// twin so tests can use async/await.
var mkTestDB = function () {
  var db = DB({
    db: memdown(cuid()),
    __use_sequential_ids_for_testing: true
  })
  _.each(db, function (fn, name) {
    if (_.isFunction(fn)) {
      db[name + 'Yieldable'] = util.promisify(fn)
    }
  })
  return db
}
//Fresh in-memory DB with deterministic sequential ids for tests.
var mkTestDB = function(){
    var db = DB({
        db: memdown(cuid()),
        __use_sequential_ids_for_testing: true,
    });
    return db;
};
test('DB - write and read', function (t) {
var db = mkTestDB()
async.series({
start_db: async.apply(db.toObj),
pico0: async.apply(db.newPico, {}),
rule0: async.apply(db.addRulesetToPico, 'id0', 'rs0'),
chan2: async.apply(db.newChannel, {pico_id: 'id0', name: 'two', type: 't', policy_id: ADMIN_POLICY_ID}),
pico1: async.apply(db.newPico, {parent_id: 'id0'}),
end_db: async.apply(db.toObj),
rmpico0: async.apply(db.removePico, 'id0'),
rmpico1: async.apply(db.removePico, 'id3'),
post_del_db: async.apply(db.toObj)
}, function (err, data) {
if (err) return t.end(err)
test("DB - write and read", function(t){
var db = mkTestDB();
async.series({
start_db: async.apply(db.toObj),
pico0: async.apply(db.newPico, {}),
rule0: async.apply(db.addRulesetToPico, "id0", "rs0"),
chan2: async.apply(db.newChannel, {pico_id: "id0", name: "two", type: "t", policy_id: ADMIN_POLICY_ID}),
pico1: async.apply(db.newPico, {parent_id: "id0"}),
end_db: async.apply(db.toObj),
rmpico0: async.apply(db.removePico, "id0"),
rmpico1: async.apply(db.removePico, "id3"),
post_del_db: async.apply(db.toObj)
}, function(err, data){
if(err) return t.end(err);
t.deepEquals(data.start_db, {})
t.deepEquals(data.start_db, {});
t.deepEquals(data.end_db, {
channel: {
id1: {
pico_id: "id0",
id: "id1",
name: "admin",
type: "secret",
policy_id: ADMIN_POLICY_ID,
sovrin: {
did: "id1",
verifyKey: "verifyKey_id1",
secret: {
seed: "seed_id1",
signKey: "signKey_id1",
},
},
},
id2: {
pico_id: "id0",
id: "id2",
name: "two",
type: "t",
policy_id: ADMIN_POLICY_ID,
sovrin: {
did: "id2",
verifyKey: "verifyKey_id2",
secret: {
seed: "seed_id2",
signKey: "signKey_id2",
},
},
},
id4: {
pico_id: "id3",
id: "id4",
name: "admin",
type: "secret",
policy_id: ADMIN_POLICY_ID,
sovrin: {
did: "id4",
verifyKey: "verifyKey_id4",
secret: {
seed: "seed_id4",
signKey: "signKey_id4",
},
},
},
},
pico: {
"id0": {
id: "id0",
parent_id: null,
admin_eci: "id1",
},
"id3": {
id: "id3",
parent_id: "id0",
admin_eci: "id4",
},
},
"pico-ruleset": {"id0": {"rs0": {on: true}}},
"ruleset-pico": {"rs0": {"id0": {on: true}}},
"pico-children": {"id0": {"id3": true}},
"pico-eci-list": {
"id0": {
"id1": true,
"id2": true,
},
"id3": {
"id4": true,
},
},
"root_pico": {
id: "id0",
parent_id: null,
admin_eci: "id1",
},
});
t.deepEquals(data.post_del_db, {});
t.end();
});
});
test("DB - storeRuleset", function(t){
var db = mkTestDB();
var krl_src = "ruleset io.picolabs.cool {}";
var rid = "io.picolabs.cool";
var hash = "7d71c05bc934b0d41fdd2055c7644fc4d0d3eabf303d67fb97f604eaab2c0aa1";
var timestamp = (new Date()).toISOString();
var url = "Some-URL-to-src ";
var expected = {};
_.set(expected, ["rulesets", "krl", hash], {
src: krl_src,
rid: rid,
url: url,
timestamp: timestamp
});
_.set(expected, ["rulesets", "versions", rid, timestamp, hash], true);
_.set(expected, ["rulesets", "url", url.toLowerCase().trim(), rid, hash], true);
async.series({
start_db: async.apply(db.toObj),
store: function(next){
db.storeRuleset(krl_src, {
url: url
}, next, timestamp);
t.deepEquals(data.end_db, {
channel: {
id1: {
pico_id: 'id0',
id: 'id1',
name: 'admin',
type: 'secret',
policy_id: ADMIN_POLICY_ID,
sovrin: {
did: 'id1',
verifyKey: 'verifyKey_id1',
secret: {
seed: 'seed_id1',
signKey: 'signKey_id1'
}
}
},
findRulesetsByURL: async.apply(db.findRulesetsByURL, url),
end_db: async.apply(db.toObj)
}, function(err, data){
if(err) return t.end(err);
t.deepEquals(data.start_db, {});
t.deepEquals(data.store, {rid: rid, hash: hash});
t.deepEquals(data.findRulesetsByURL, [{
rid: rid,
hash: hash
}]);
t.deepEquals(data.end_db, expected);
t.end();
});
});
test("DB - enableRuleset", function(t){
var db = mkTestDB();
var krl_src = "ruleset io.picolabs.cool {}";
//TODO
async.waterfall([
function(callback){
db.toObj(callback);
id2: {
pico_id: 'id0',
id: 'id2',
name: 'two',
type: 't',
policy_id: ADMIN_POLICY_ID,
sovrin: {
did: 'id2',
verifyKey: 'verifyKey_id2',
secret: {
seed: 'seed_id2',
signKey: 'signKey_id2'
}
}
},
function(db_json, callback){
t.deepEquals(_.omit(db_json, "rulesets"), {});
db.storeRuleset(krl_src, {}, callback);
id4: {
pico_id: 'id3',
id: 'id4',
name: 'admin',
type: 'secret',
policy_id: ADMIN_POLICY_ID,
sovrin: {
did: 'id4',
verifyKey: 'verifyKey_id4',
secret: {
seed: 'seed_id4',
signKey: 'signKey_id4'
}
}
}
},
pico: {
'id0': {
id: 'id0',
parent_id: null,
admin_eci: 'id1'
},
function(data, callback){
db.enableRuleset(data.hash, function(err){
callback(err, data.hash);
});
'id3': {
id: 'id3',
parent_id: 'id0',
admin_eci: 'id4'
}
},
'pico-ruleset': {'id0': {'rs0': {on: true}}},
'ruleset-pico': {'rs0': {'id0': {on: true}}},
'pico-children': {'id0': {'id3': true}},
'pico-eci-list': {
'id0': {
'id1': true,
'id2': true
},
function(hash, callback){
db.toObj(function(err, db){
callback(err, db, hash);
});
},
function(db_json, hash, callback){
t.deepEquals(_.get(db_json, [
"rulesets",
"enabled",
"io.picolabs.cool",
"hash"
]), hash);
db.getEnabledRuleset("io.picolabs.cool", function(err, data){
if(err) return callback(err);
t.equals(data.src, krl_src);
t.equals(data.hash, hash);
t.equals(data.rid, "io.picolabs.cool");
t.equals(data.timestamp_enable, _.get(db_json, [
"rulesets",
"enabled",
"io.picolabs.cool",
"timestamp"
]));
callback();
});
'id3': {
'id4': true
}
], t.end);
});
},
'root_pico': {
id: 'id0',
parent_id: null,
admin_eci: 'id1'
}
})
test("DB - read keys that don't exist", function(t){
var db = mkTestDB();
t.deepEquals(data.post_del_db, {})
async.series({
ent: async.apply(db.getEntVar, "pico0", "rid0", "var that doesn't exisit", null),
app: async.apply(db.getAppVar, "rid0", "var that doesn't exisit", null)
}, function(err, data){
if(err) return t.end(err);
t.deepEquals(data.ent, undefined);
t.deepEquals(data.app, undefined);
t.end();
});
});
t.end()
})
})
test("DB - getRootPico", function(t){
var db = mkTestDB();
test('DB - storeRuleset', function (t) {
var db = mkTestDB()
var tstRoot = function(assertFn){
return function(next){
db.getRootPico(function(err, r_pico){
assertFn(err, r_pico);
next();
});
};
};
var krlSrc = 'ruleset io.picolabs.cool {}'
var rid = 'io.picolabs.cool'
var hash = '7d71c05bc934b0d41fdd2055c7644fc4d0d3eabf303d67fb97f604eaab2c0aa1'
var timestamp = (new Date()).toISOString()
var url = 'Some-URL-to-src '
async.series([
tstRoot(function(err, r_pico){
t.ok(err);
t.ok(err.notFound);
t.deepEquals(r_pico, void 0);
}),
async.apply(db.newChannel, {pico_id: "foo", name: "bar", type: "baz"}),
async.apply(db.newPico, {}),
tstRoot(function(err, r_pico){
t.notOk(err);
t.deepEquals(r_pico, {id: "id1", parent_id: null, admin_eci: "id2"});
}),
async.apply(db.newPico, {parent_id: "id1"}),
tstRoot(function(err, r_pico){
t.notOk(err);
t.deepEquals(r_pico, {id: "id1", parent_id: null, admin_eci: "id2"});
}),
async.apply(db.newPico, {parent_id: null}),
tstRoot(function(err, r_pico){
t.notOk(err);
t.deepEquals(r_pico, {id: "id5", parent_id: null, admin_eci: "id6"});
}),
], t.end);
});
var expected = {}
_.set(expected, ['rulesets', 'krl', hash], {
src: krlSrc,
rid: rid,
url: url,
timestamp: timestamp
})
_.set(expected, ['rulesets', 'versions', rid, timestamp, hash], true)
_.set(expected, ['rulesets', 'url', url.toLowerCase().trim(), rid, hash], true)
test("DB - isRulesetUsed", function(t){
var db = mkTestDB();
async.series({
start_db: async.apply(db.toObj),
store: function (next) {
db.storeRuleset(krlSrc, {
url: url
}, next, timestamp)
},
findRulesetsByURL: async.apply(db.findRulesetsByURL, url),
end_db: async.apply(db.toObj)
}, function (err, data) {
if (err) return t.end(err)
t.deepEquals(data.start_db, {})
t.deepEquals(data.store, {rid: rid, hash: hash})
t.deepEquals(data.findRulesetsByURL, [{
rid: rid,
hash: hash
}])
t.deepEquals(data.end_db, expected)
t.end()
})
})
async.series({
pico0: async.apply(db.newPico, {}),
pico1: async.apply(db.newPico, {}),
test('DB - enableRuleset', function (t) {
var db = mkTestDB()
foo0: async.apply(db.addRulesetToPico, "id0", "rs-foo"),
foo1: async.apply(db.addRulesetToPico, "id1", "rs-foo"),
bar0: async.apply(db.addRulesetToPico, "id0", "rs-bar"),
var krlSrc = 'ruleset io.picolabs.cool {}'
// TODO
async.waterfall([
function (callback) {
db.toObj(callback)
},
function (dbJson, callback) {
t.deepEquals(_.omit(dbJson, 'rulesets'), {})
db.storeRuleset(krlSrc, {}, callback)
},
function (data, callback) {
db.enableRuleset(data.hash, function (err) {
callback(err, data.hash)
})
},
function (hash, callback) {
db.toObj(function (err, db) {
callback(err, db, hash)
})
},
function (dbJson, hash, callback) {
t.deepEquals(_.get(dbJson, [
'rulesets',
'enabled',
'io.picolabs.cool',
'hash'
]), hash)
db.getEnabledRuleset('io.picolabs.cool', function (err, data) {
if (err) return callback(err)
t.equals(data.src, krlSrc)
t.equals(data.hash, hash)
t.equals(data.rid, 'io.picolabs.cool')
t.equals(data.timestamp_enable, _.get(dbJson, [
'rulesets',
'enabled',
'io.picolabs.cool',
'timestamp'
]))
callback()
})
}
], t.end)
})
is_foo: async.apply(db.isRulesetUsed, "rs-foo"),
is_bar: async.apply(db.isRulesetUsed, "rs-bar"),
is_baz: async.apply(db.isRulesetUsed, "rs-baz"),
is_qux: async.apply(db.isRulesetUsed, "rs-qux"),
}, function(err, data){
if(err) return t.end(err);
t.equals(data.is_foo, true);
t.equals(data.is_bar, true);
t.equals(data.is_baz, false);
t.equals(data.is_qux, false);
t.end();
});
});
testA("DB - read keys that don't exist", async function (t) {
var db = mkTestDB()
test("DB - deleteRuleset", function(t){
var db = mkTestDB();
var ent = await db.getEntVarYieldable('pico0', 'rid0', "var that doesn't exisit", null)
t.is(ent, undefined)
var storeRuleset = function(name){
return function(callback){
var rid = "io.picolabs." + name;
var krl = "ruleset " + rid + " {}";
db.storeRuleset(krl, {
url: "file:///" + name + ".krl"
}, function(err, data){
if(err) return callback(err);
db.enableRuleset(data.hash, function(err){
if(err) return callback(err);
db.putAppVar(rid, "my_var", null, "appvar value", function(err){
callback(err, data.hash);
});
});
});
};
};
var app = await db.getAppVarYieldable('rid0', "var that doesn't exisit", null)
t.is(app, undefined)
})
async.series({
store_foo: storeRuleset("foo"),
store_bar: storeRuleset("bar"),
test('DB - getRootPico', function (t) {
var db = mkTestDB()
init_db: async.apply(db.toObj),
var tstRoot = function (assertFn) {
return function (next) {
db.getRootPico(function (err, rPico) {
assertFn(err, rPico)
next()
})
}
}
del_foo: async.apply(db.deleteRuleset, "io.picolabs.foo"),
async.series([
tstRoot(function (err, rPico) {
t.ok(err)
t.ok(err.notFound)
t.deepEquals(rPico, void 0)
}),
async.apply(db.newChannel, {pico_id: 'foo', name: 'bar', type: 'baz'}),
async.apply(db.newPico, {}),
tstRoot(function (err, rPico) {
t.notOk(err)
t.deepEquals(rPico, {id: 'id1', parent_id: null, admin_eci: 'id2'})
}),
async.apply(db.newPico, {parent_id: 'id1'}),
tstRoot(function (err, rPico) {
t.notOk(err)
t.deepEquals(rPico, {id: 'id1', parent_id: null, admin_eci: 'id2'})
}),
async.apply(db.newPico, {parent_id: null}),
tstRoot(function (err, rPico) {
t.notOk(err)
t.deepEquals(rPico, {id: 'id5', parent_id: null, admin_eci: 'id6'})
})
], t.end)
})
end_db: async.apply(db.toObj),
}, function(err, data){
if(err) return t.end(err);
test('DB - isRulesetUsed', function (t) {
var db = mkTestDB()
t.deepEquals(_.keys(data.init_db.rulesets.versions), [
"io.picolabs.bar",
"io.picolabs.foo",
], "ensure all were actually stored in the db");
async.series({
pico0: async.apply(db.newPico, {}),
pico1: async.apply(db.newPico, {}),
t.deepEquals(_.keys(data.end_db.rulesets.versions), [
"io.picolabs.bar",
], "ensure io.picolabs.foo was removed");
foo0: async.apply(db.addRulesetToPico, 'id0', 'rs-foo'),
foo1: async.apply(db.addRulesetToPico, 'id1', 'rs-foo'),
bar0: async.apply(db.addRulesetToPico, 'id0', 'rs-bar'),
is_foo: async.apply(db.isRulesetUsed, 'rs-foo'),
is_bar: async.apply(db.isRulesetUsed, 'rs-bar'),
is_baz: async.apply(db.isRulesetUsed, 'rs-baz'),
is_qux: async.apply(db.isRulesetUsed, 'rs-qux')
}, function (err, data) {
if (err) return t.end(err)
t.equals(data.is_foo, true)
t.equals(data.is_bar, true)
t.equals(data.is_baz, false)
t.equals(data.is_qux, false)
t.end()
})
})
//make the `init_db` look like the expected `end_db`
var expected_db = _.cloneDeep(data.init_db);
t.deepEqual(expected_db, data.init_db, "sanity check");
test('DB - deleteRuleset', function (t) {
var db = mkTestDB()
delete expected_db.rulesets.enabled["io.picolabs.foo"];
delete expected_db.rulesets.krl[data.store_foo];
delete expected_db.rulesets.url["file:///foo.krl"];
delete expected_db.rulesets.versions["io.picolabs.foo"];
delete expected_db.appvars["io.picolabs.foo"];
var storeRuleset = function (name) {
return function (callback) {
var rid = 'io.picolabs.' + name
var krl = 'ruleset ' + rid + ' {}'
db.storeRuleset(krl, {
url: 'file:///' + name + '.krl'
}, function (err, data) {
if (err) return callback(err)
db.enableRuleset(data.hash, function (err) {
if (err) return callback(err)
db.putAppVar(rid, 'my_var', null, 'appvar value', function (err) {
callback(err, data.hash)
})
})
})
}
}
t.notDeepEqual(expected_db, data.init_db, "sanity check");
t.deepEquals(data.end_db, expected_db);
async.series({
store_foo: storeRuleset('foo'),
store_bar: storeRuleset('bar'),
t.end();
});
});
init_db: async.apply(db.toObj),
test("DB - scheduleEventAt", function(t){
var db = mkTestDB();
del_foo: async.apply(db.deleteRuleset, 'io.picolabs.foo'),
var eventAt = function(date, type){
return function(callback){
db.scheduleEventAt(new Date(date), {
domain: "foobar",
type: type,
attributes: {some: "attr"},
}, callback);
};
};
var rmAt = function(id){
return function(callback){
db.removeScheduled(id, callback);
};
};
end_db: async.apply(db.toObj)
}, function (err, data) {
if (err) return t.end(err)
var getNext = async.apply(db.nextScheduleEventAt);
t.deepEquals(_.keys(data.init_db.rulesets.versions), [
'io.picolabs.bar',
'io.picolabs.foo'
], 'ensure all were actually stored in the db')
async.series({
init_db: async.apply(db.toObj),
next0: getNext,
at0: eventAt("Feb 22, 2222", "foo"),
next1: getNext,
at1: eventAt("Feb 23, 2222", "bar"),
next2: getNext,
at2: eventAt("Feb 2, 2222", "baz"),
next3: getNext,
t.deepEquals(_.keys(data.end_db.rulesets.versions), [
'io.picolabs.bar'
], 'ensure io.picolabs.foo was removed')
list: async.apply(db.listScheduled),
// make the `init_db` look like the expected `end_db`
var expectedDb = _.cloneDeep(data.init_db)
t.deepEqual(expectedDb, data.init_db, 'sanity check')
rm0: rmAt("id0"),
next4: getNext,
rm2: rmAt("id2"),
next5: getNext,
rm1: rmAt("id1"),
next6: getNext,
delete expectedDb.rulesets.enabled['io.picolabs.foo']
delete expectedDb.rulesets.krl[data.store_foo]
delete expectedDb.rulesets.url['file:///foo.krl']
delete expectedDb.rulesets.versions['io.picolabs.foo']
delete expectedDb.appvars['io.picolabs.foo']
end_db: async.apply(db.toObj),
}, function(err, data){
if(err) return t.end(err);
t.notDeepEqual(expectedDb, data.init_db, 'sanity check')
t.deepEquals(data.end_db, expectedDb)
t.deepEquals(data.init_db, {});
t.end()
})
})
t.deepEquals(data.at0, {
id: "id0",
at: new Date("Feb 22, 2222"),
event: {domain: "foobar", type: "foo", attributes: {some: "attr"}},
});
t.deepEquals(data.at1, {
id: "id1",
at: new Date("Feb 23, 2222"),
event: {domain: "foobar", type: "bar", attributes: {some: "attr"}},
});
t.deepEquals(data.at2, {
id: "id2",
at: new Date("Feb 2, 2222"),
event: {domain: "foobar", type: "baz", attributes: {some: "attr"}},
});
test('DB - scheduleEventAt', function (t) {
var db = mkTestDB()
t.deepEquals(data.list, [
data.at2,
data.at0,
data.at1,
].map(function(val){
return _.assign({}, val, {
at: val.at.toISOString(),
});
}));
var eventAt = function (date, type) {
return function (callback) {
db.scheduleEventAt(new Date(date), {
domain: 'foobar',
type: type,
attributes: {some: 'attr'}
}, callback)
}
}
var rmAt = function (id) {
return function (callback) {
db.removeScheduled(id, callback)
}
}
t.deepEquals(data.next0, void 0, "nothing scheduled");
t.ok(_.has(data, "next0"), "ensure next0 was actually tested");
t.deepEquals(data.next1, data.at0, "only one scheduled");
t.deepEquals(data.next2, data.at0, "at0 is still sooner than at1");
t.deepEquals(data.next3, data.at2, "at2 is sooner than at0");
t.deepEquals(data.next4, data.at2);
t.deepEquals(data.next5, data.at1, "at1 is soonest now that at0 and at2 were removed");
t.deepEquals(data.next6, void 0, "nothing scheduled");
t.ok(_.has(data, "next6"), "ensure next6 was actually tested");
var getNext = async.apply(db.nextScheduleEventAt)
t.deepEquals(data.end_db, {}, "should be nothing left in the db");
async.series({
init_db: async.apply(db.toObj),
next0: getNext,
at0: eventAt('Feb 22, 2222', 'foo'),
next1: getNext,
at1: eventAt('Feb 23, 2222', 'bar'),
next2: getNext,
at2: eventAt('Feb 2, 2222', 'baz'),
next3: getNext,
t.end();
});
});
list: async.apply(db.listScheduled),
test("DB - scheduleEventRepeat", function(t){
var db = mkTestDB();
rm0: rmAt('id0'),
next4: getNext,
rm2: rmAt('id2'),
next5: getNext,
rm1: rmAt('id1'),
next6: getNext,
var eventRep = function(timespec, type){
return function(callback){
db.scheduleEventRepeat(timespec, {
domain: "foobar",
type: type,
attributes: {some: "attr"},
}, callback);
};
};
async.series({
init_db: async.apply(db.toObj),
end_db: async.apply(db.toObj)
}, function (err, data) {
if (err) return t.end(err)
rep0: eventRep("*/5 * * * * *", "foo"),
rep1: eventRep("* */5 * * * *", "bar"),
t.deepEquals(data.init_db, {})
mid_db: async.apply(db.toObj),
t.deepEquals(data.at0, {
id: 'id0',
at: new Date('Feb 22, 2222'),
event: {domain: 'foobar', type: 'foo', attributes: {some: 'attr'}}
})
t.deepEquals(data.at1, {
id: 'id1',
at: new Date('Feb 23, 2222'),
event: {domain: 'foobar', type: 'bar', attributes: {some: 'attr'}}
})
t.deepEquals(data.at2, {
id: 'id2',
at: new Date('Feb 2, 2222'),
event: {domain: 'foobar', type: 'baz', attributes: {some: 'attr'}}
})
list: async.apply(db.listScheduled),
t.deepEquals(data.list, [
data.at2,
data.at0,
data.at1
].map(function (val) {
return _.assign({}, val, {
at: val.at.toISOString()
})
}))
rm0: async.apply(db.removeScheduled, "id0"),
rm1: async.apply(db.removeScheduled, "id1"),
t.deepEquals(data.next0, void 0, 'nothing scheduled')
t.ok(_.has(data, 'next0'), 'ensure next0 was actually tested')
t.deepEquals(data.next1, data.at0, 'only one scheduled')
t.deepEquals(data.next2, data.at0, 'at0 is still sooner than at1')
t.deepEquals(data.next3, data.at2, 'at2 is sooner than at0')
t.deepEquals(data.next4, data.at2)
t.deepEquals(data.next5, data.at1, 'at1 is soonest now that at0 and at2 were removed')
t.deepEquals(data.next6, void 0, 'nothing scheduled')
t.ok(_.has(data, 'next6'), 'ensure next6 was actually tested')
end_db: async.apply(db.toObj),
}, function(err, data){
if(err) return t.end(err);
t.deepEquals(data.end_db, {}, 'should be nothing left in the db')
t.deepEquals(data.init_db, {});
t.end()
})
})
t.deepEquals(data.rep0, {
id: "id0",
timespec: "*/5 * * * * *",
event: {domain: "foobar", type: "foo", attributes: {some: "attr"}},
});
t.deepEquals(data.rep1, {
id: "id1",
timespec: "* */5 * * * *",
event: {domain: "foobar", type: "bar", attributes: {some: "attr"}},
});
test('DB - scheduleEventRepeat', function (t) {
var db = mkTestDB()
t.deepEquals(data.mid_db, {scheduled: {
id0: data.rep0,
id1: data.rep1,
}});
var eventRep = function (timespec, type) {
return function (callback) {
db.scheduleEventRepeat(timespec, {
domain: 'foobar',
type: type,
attributes: {some: 'attr'}
}, callback)
}
}
async.series({
init_db: async.apply(db.toObj),
t.deepEquals(data.list, [
data.rep0,
data.rep1,
]);
rep0: eventRep('*/5 * * * * *', 'foo'),
rep1: eventRep('* */5 * * * *', 'bar'),
t.deepEquals(data.end_db, {}, "should be nothing left in the db");
mid_db: async.apply(db.toObj),
t.end();
});
});
list: async.apply(db.listScheduled),
test("DB - removeRulesetFromPico", function(t){
var db = mkTestDB();
rm0: async.apply(db.removeScheduled, 'id0'),
rm1: async.apply(db.removeScheduled, 'id1'),
async.series({
addRS: async.apply(db.addRulesetToPico, "pico0", "rid0"),
ent0: async.apply(db.putEntVar, "pico0", "rid0", "foo", null, "val0"),
ent1: async.apply(db.putEntVar, "pico0", "rid0", "bar", null, "val1"),
db_before: async.apply(db.toObj),
end_db: async.apply(db.toObj)
}, function (err, data) {
if (err) return t.end(err)
rmRS: async.apply(db.removeRulesetFromPico, "pico0", "rid0"),
t.deepEquals(data.init_db, {})
db_after: async.apply(db.toObj),
}, function(err, data){
if(err) return t.end(err);
t.deepEquals(data.rep0, {
id: 'id0',
timespec: '*/5 * * * * *',
event: {domain: 'foobar', type: 'foo', attributes: {some: 'attr'}}
})
t.deepEquals(data.rep1, {
id: 'id1',
timespec: '* */5 * * * *',
event: {domain: 'foobar', type: 'bar', attributes: {some: 'attr'}}
})
t.deepEquals(data.db_before, {
entvars: {pico0: {rid0: {
foo: {type: "String", value: "val0"},
bar: {type: "String", value: "val1"},
}}},
"pico-ruleset": {"pico0": {"rid0": {on: true}}},
"ruleset-pico": {"rid0": {"pico0": {on: true}}},
});
t.deepEquals(data.mid_db, {scheduled: {
id0: data.rep0,
id1: data.rep1
}})
t.deepEquals(data.db_after, {}, "should all be gone");
t.deepEquals(data.list, [
data.rep0,
data.rep1
])
t.end();
});
});
t.deepEquals(data.end_db, {}, 'should be nothing left in the db')
test("DB - getPicoIDByECI", function(t){
var db = mkTestDB();
async.series({
pico0: async.apply(db.newPico, {}),
pico2: async.apply(db.newPico, {}),
t.end()
})
})
c4_p0: async.apply(db.newChannel, {pico_id: "id0", name: "four", type: "t"}),
c5_p1: async.apply(db.newChannel, {pico_id: "id2", name: "five", type: "t"}),
test('DB - removeRulesetFromPico', function (t) {
var db = mkTestDB()
get_c2: async.apply(db.getPicoIDByECI, "id1"),
get_c3: async.apply(db.getPicoIDByECI, "id3"),
get_c4: async.apply(db.getPicoIDByECI, "id4"),
get_c5: async.apply(db.getPicoIDByECI, "id5"),
async.series({
addRS: async.apply(db.addRulesetToPico, 'pico0', 'rid0'),
ent0: async.apply(db.putEntVar, 'pico0', 'rid0', 'foo', null, 'val0'),
ent1: async.apply(db.putEntVar, 'pico0', 'rid0', 'bar', null, 'val1'),
db_before: async.apply(db.toObj),
}, function(err, data){
if(err) return t.end(err);
rmRS: async.apply(db.removeRulesetFromPico, 'pico0', 'rid0'),
t.deepEquals(data.get_c2, "id0");
t.deepEquals(data.get_c3, "id2");
t.deepEquals(data.get_c4, "id0");
t.deepEquals(data.get_c5, "id2");
db_after: async.apply(db.toObj)
}, function (err, data) {
if (err) return t.end(err)
db.getPicoIDByECI("bad-id", function(err, id){
t.ok(err);
t.ok((err && err.notFound) === true);
t.notOk(id);
t.end();
});
});
});
t.deepEquals(data.db_before, {
entvars: {pico0: {rid0: {
foo: {type: 'String', value: 'val0'},
bar: {type: 'String', value: 'val1'}
}}},
'pico-ruleset': {'pico0': {'rid0': {on: true}}},
'ruleset-pico': {'rid0': {'pico0': {on: true}}}
})
test("DB - listChannels", function(t){
var db = mkTestDB();
async.series({
pico0: async.apply(db.newPico, {}),
pico2: async.apply(db.newPico, {}),
t.deepEquals(data.db_after, {}, 'should all be gone')
c4_p0: async.apply(db.newChannel, {pico_id: "id0", name: "four", type: "t4", policy_id: ADMIN_POLICY_ID}),
c5_p1: async.apply(db.newChannel, {pico_id: "id2", name: "five", type: "t5", policy_id: ADMIN_POLICY_ID}),
t.end()
})
})
list0: async.apply(db.listChannels, "id0"),
list2: async.apply(db.listChannels, "id2"),
list404: async.apply(db.listChannels, "id404"),
test('DB - getPicoIDByECI', function (t) {
var db = mkTestDB()
async.series({
pico0: async.apply(db.newPico, {}),
pico2: async.apply(db.newPico, {}),
}, function(err, data){
if(err) return t.end(err);
c4_p0: async.apply(db.newChannel, {pico_id: 'id0', name: 'four', type: 't'}),
c5_p1: async.apply(db.newChannel, {pico_id: 'id2', name: 'five', type: 't'}),
var mkChan = function(pico_id, eci, name, type){
return {
pico_id: pico_id,
id: eci,
name: name,
type: type,
policy_id: ADMIN_POLICY_ID,
sovrin: {
did: eci,
verifyKey: "verifyKey_" + eci,
},
};
};
get_c2: async.apply(db.getPicoIDByECI, 'id1'),
get_c3: async.apply(db.getPicoIDByECI, 'id3'),
get_c4: async.apply(db.getPicoIDByECI, 'id4'),
get_c5: async.apply(db.getPicoIDByECI, 'id5')
var c1 = mkChan("id0", "id1", "admin", "secret");
var c3 = mkChan("id2", "id3", "admin", "secret");
var c4 = mkChan("id0", "id4", "four", "t4");
var c5 = mkChan("id2", "id5", "five", "t5");
}, function (err, data) {
if (err) return t.end(err)
t.deepEquals(data.get_c2, 'id0')
t.deepEquals(data.get_c3, 'id2')
t.deepEquals(data.get_c4, 'id0')
t.deepEquals(data.get_c5, 'id2')
t.deepEquals(data.c4_p0, c4);
t.deepEquals(data.c5_p1, c5);
db.getPicoIDByECI('bad-id', function (err, id) {
t.ok(err)
t.ok((err && err.notFound) === true)
t.notOk(id)
t.end()
})
})
})
t.deepEquals(data.list0, [c1, c4]);
t.deepEquals(data.list2, [c3, c5]);
t.deepEquals(data.list404, []);
test('DB - listChannels', function (t) {
var db = mkTestDB()
async.series({
pico0: async.apply(db.newPico, {}),
pico2: async.apply(db.newPico, {}),
t.end();
});
});
c4_p0: async.apply(db.newChannel, {pico_id: 'id0', name: 'four', type: 't4', policy_id: ADMIN_POLICY_ID}),
c5_p1: async.apply(db.newChannel, {pico_id: 'id2', name: 'five', type: 't5', policy_id: ADMIN_POLICY_ID}),
test("DB - listAllEnabledRIDs", function(t){
var db = mkTestDB();
list0: async.apply(db.listChannels, 'id0'),
list2: async.apply(db.listChannels, 'id2'),
list404: async.apply(db.listChannels, 'id404')
var hashes = {};
var store = function(rid){
return function(done){
db.storeRuleset("ruleset " + rid + "{}", {}, function(err, data){
hashes[rid] = data.hash;
done();
});
};
};
}, function (err, data) {
if (err) return t.end(err)
var enable = function(rid){
return function(done){
db.enableRuleset(hashes[rid], done);
};
};
var mkChan = function (picoId, eci, name, type) {
return {
pico_id: picoId,
id: eci,
name: name,
type: type,
policy_id: ADMIN_POLICY_ID,
sovrin: {
did: eci,
verifyKey: 'verifyKey_' + eci
}
}
}
async.series({
list0: async.apply(db.listAllEnabledRIDs),
var c1 = mkChan('id0', 'id1', 'admin', 'secret')
var c3 = mkChan('id2', 'id3', 'admin', 'secret')
var c4 = mkChan('id0', 'id4', 'four', 't4')
var c5 = mkChan('id2', 'id5', 'five', 't5')
s_foo: store("foo"),
s_bar: store("bar"),
s_baz: store("baz"),
list1: async.apply(db.listAllEnabledRIDs),
t.deepEquals(data.c4_p0, c4)
t.deepEquals(data.c5_p1, c5)
e_foo: enable("foo"),
list2: async.apply(db.listAllEnabledRIDs),
t.deepEquals(data.list0, [c1, c4])
t.deepEquals(data.list2, [c3, c5])
t.deepEquals(data.list404, [])
e_bar: enable("bar"),
e_baz: enable("baz"),
list3: async.apply(db.listAllEnabledRIDs),
t.end()
})
})
d_foo: async.apply(db.disableRuleset, "foo"),
list4: async.apply(db.listAllEnabledRIDs),
}, function(err, data){
if(err) return t.end(err);
test('DB - listAllEnabledRIDs', function (t) {
var db = mkTestDB()
t.deepEquals(data.list0, []);
t.deepEquals(data.list1, []);
t.deepEquals(data.list2, ["foo"]);
t.deepEquals(data.list3, ["bar", "baz", "foo"]);
t.deepEquals(data.list4, ["bar", "baz"]);
var hashes = {}
var store = function (rid) {
return function (done) {
db.storeRuleset('ruleset ' + rid + '{}', {}, function (err, data) {
if (err) return done(err)
hashes[rid] = data.hash
done()
})
}
}
t.end();
});
});
var enable = function (rid) {
return function (done) {
db.enableRuleset(hashes[rid], done)
}
}
test("DB - migrations", function(t){
var db = mkTestDB();
async.series([
function(next){
db.getMigrationLog(function(err, log){
if(err) return next(err);
t.deepEquals(log, {});
next();
});
},
async.apply(db.recordMigration, "v1"),
function(next){
db.getMigrationLog(function(err, log){
if(err) return next(err);
async.series({
list0: async.apply(db.listAllEnabledRIDs),
t.deepEquals(_.keys(log), ["v1"]);
t.deepEquals(_.keys(log["v1"]), ["timestamp"]);
t.equals(log["v1"].timestamp, (new Date(log["v1"].timestamp)).toISOString());
s_foo: store('foo'),
s_bar: store('bar'),
s_baz: store('baz'),
list1: async.apply(db.listAllEnabledRIDs),
next();
});
},
async.apply(db.recordMigration, "v200"),
function(next){
db.getMigrationLog(function(err, log){
if(err) return next(err);
t.deepEquals(_.keys(log), ["v1", "v200"]);
next();
});
},
async.apply(db.removeMigration, "v200"),
function(next){
db.getMigrationLog(function(err, log){
if(err) return next(err);
t.deepEquals(_.keys(log), ["v1"]);
next();
});
},
async.apply(db.removeMigration, "v1"),
function(next){
db.getMigrationLog(function(err, log){
if(err) return next(err);
t.deepEquals(log, {});
next();
});
},
async.apply(db.checkAndRunMigrations),
function(next){
db.getMigrationLog(function(err, log){
if(err) return next(err);
t.deepEquals(_.keys(log), _.keys(migrations));
next();
});
},
], t.end);
});
e_foo: enable('foo'),
list2: async.apply(db.listAllEnabledRIDs),
test("DB - parent/child", function(t){
var db = mkTestDB();
e_bar: enable('bar'),
e_baz: enable('baz'),
list3: async.apply(db.listAllEnabledRIDs),
var assertParent = function(pico_id, expected_parent_id){
return function(next){
db.getParent(pico_id, function(err, parent_id){
if(err) return next(err);
t.equals(parent_id, expected_parent_id, "testing db.getParent");
next();
});
};
};
d_foo: async.apply(db.disableRuleset, 'foo'),
list4: async.apply(db.listAllEnabledRIDs)
}, function (err, data) {
if (err) return t.end(err)
var assertChildren = function(pico_id, expected_children_ids){
return function(next){
db.listChildren(pico_id, function(err, list){
if(err) return next(err);
t.deepEquals(list, expected_children_ids, "testing db.listChildren");
next();
});
};
};
t.deepEquals(data.list0, [])
t.deepEquals(data.list1, [])
t.deepEquals(data.list2, ['foo'])
t.deepEquals(data.list3, ['bar', 'baz', 'foo'])
t.deepEquals(data.list4, ['bar', 'baz'])
t.end()
})
})
async.series([
async.apply(db.newPico, {}),// id0 and channel id1
async.apply(db.newPico, {parent_id: "id0"}),// id2 + id3
async.apply(db.newPico, {parent_id: "id0"}),// id4 + id5
async.apply(db.newPico, {parent_id: "id0"}),// id6 + id7
test('DB - migrations', function (t) {
var db = mkTestDB()
async.series([
function (next) {
db.getMigrationLog(function (err, log) {
if (err) return next(err)
t.deepEquals(log, {})
next()
})
},
async.apply(db.recordMigration, 'v1'),
function (next) {
db.getMigrationLog(function (err, log) {
if (err) return next(err)
async.apply(db.newPico, {parent_id: "id6"}),// id8 + id9
async.apply(db.newPico, {parent_id: "id6"}),// id10 + id11
t.deepEquals(_.keys(log), ['v1'])
t.deepEquals(_.keys(log['v1']), ['timestamp'])
t.equals(log['v1'].timestamp, (new Date(log['v1'].timestamp)).toISOString())
assertParent("id0", null),
assertParent("id2", "id0"),
assertParent("id4", "id0"),
assertParent("id6", "id0"),
assertParent("id8", "id6"),
assertParent("id10", "id6"),
next()
})
},
async.apply(db.recordMigration, 'v200'),
function (next) {
db.getMigrationLog(function (err, log) {
if (err) return next(err)
t.deepEquals(_.keys(log), ['v1', 'v200'])
next()
})
},
async.apply(db.removeMigration, 'v200'),
function (next) {
db.getMigrationLog(function (err, log) {
if (err) return next(err)
t.deepEquals(_.keys(log), ['v1'])
next()
})
},
async.apply(db.removeMigration, 'v1'),
function (next) {
db.getMigrationLog(function (err, log) {
if (err) return next(err)
t.deepEquals(log, {})
next()
})
},
async.apply(db.checkAndRunMigrations),
function (next) {
db.getMigrationLog(function (err, log) {
if (err) return next(err)
t.deepEquals(_.keys(log), _.keys(migrations))
next()
})
}
], t.end)
})
assertChildren("id0", ["id2", "id4", "id6"]),
assertChildren("id2", []),
assertChildren("id4", []),
assertChildren("id6", ["id10", "id8"]),
assertChildren("id8", []),
assertChildren("id10", []),
test('DB - parent/child', function (t) {
var db = mkTestDB()
async.apply(db.removePico, "id8"),
assertChildren("id6", ["id10"]),
var assertParent = function (picoId, expectedParentId) {
return function (next) {
db.getParent(picoId, function (err, parentId) {
if (err) return next(err)
t.equals(parentId, expectedParentId, 'testing db.getParent')
next()
})
}
}
async.apply(db.removePico, "id6"),
assertChildren("id6", []),
var assertChildren = function (picoId, expectedChildrenIds) {
return function (next) {
db.listChildren(picoId, function (err, list) {
if (err) return next(err)
t.deepEquals(list, expectedChildrenIds, 'testing db.listChildren')
next()
})
}
}
], t.end);
});
async.series([
async.apply(db.newPico, {}), // id0 and channel id1
async.apply(db.newPico, {parent_id: 'id0'}), // id2 + id3
async.apply(db.newPico, {parent_id: 'id0'}), // id4 + id5
async.apply(db.newPico, {parent_id: 'id0'}), // id6 + id7
async.apply(db.newPico, {parent_id: 'id6'}), // id8 + id9
async.apply(db.newPico, {parent_id: 'id6'}), // id10 + id11
test("DB - assertPicoID", function(t){
var db = mkTestDB();
assertParent('id0', null),
assertParent('id2', 'id0'),
assertParent('id4', 'id0'),
assertParent('id6', 'id0'),
assertParent('id8', 'id6'),
assertParent('id10', 'id6'),
var tstPID = function(id, expected_it){
return function(next){
db.assertPicoID(id, function(err, got_id){
if(expected_it){
t.notOk(err);
t.equals(got_id, id);
}else{
t.ok(err);
t.notOk(got_id);
}
next();
});
};
};
assertChildren('id0', ['id2', 'id4', 'id6']),
assertChildren('id2', []),
assertChildren('id4', []),
assertChildren('id6', ['id10', 'id8']),
assertChildren('id8', []),
assertChildren('id10', []),
async.series([
async.apply(db.newPico, {}),
async.apply(db.removePico, 'id8'),
assertChildren('id6', ['id10']),
tstPID(null, false),
tstPID(void 0, false),
tstPID({}, false),
tstPID(0, false),
async.apply(db.removePico, 'id6'),
assertChildren('id6', [])
tstPID("id0", true),
tstPID("id2", false),
], t.end)
})
], t.end);
});
test('DB - assertPicoID', function (t) {
var db = mkTestDB()
var tstPID = function (id, expectedIt) {
return function (next) {
db.assertPicoID(id, function (err, gotId) {
if (expectedIt) {
t.notOk(err)
t.equals(gotId, id)
} else {
t.ok(err)
t.notOk(gotId)
}
next()
})
}
}
test("DB - removeChannel", function(t){
var db = mkTestDB();
async.series([
async.apply(db.newPico, {}),
var assertECIs = function(pico_id, expected_ecis){
return function(next){
db.listChannels(pico_id, function(err, chans){
if(err) return next(err);
tstPID(null, false),
tstPID(void 0, false),
tstPID({}, false),
tstPID(0, false),
var eci_list = _.map(chans, "id");
t.deepEquals(eci_list, expected_ecis, "assert the listChannels");
t.deepEquals(_.uniq(_.map(chans, "pico_id")), [pico_id], "assert listChannels all come from the same pico_id");
tstPID('id0', true),
tstPID('id2', false)
next();
});
};
};
], t.end)
})
var assertFailRemoveECI = function(eci){
return function(next){
db.removeChannel(eci, function(err){
t.equals(err + "", "Error: Cannot delete the pico's admin channel");
next();
});
};
};
testA('DB - removeChannel', async function (t) {
var db = mkTestDB()
async.series([
var assertECIs = async function (picoId, expectedEcis) {
var chans = await db.listChannelsYieldable(picoId)
async.apply(db.newPico, {}),
assertECIs("id0", ["id1"]),
var eciList = _.map(chans, 'id')
t.is(eciList, expectedEcis, 'assert the listChannels')
t.is(_.uniq(_.map(chans, 'pico_id')), [picoId], 'assert listChannels all come from the same pico_id')
}
async.apply(db.newChannel, {pico_id: "id0", name: "two", type: "t"}),
assertECIs("id0", ["id1", "id2"]),
var assertFailRemoveECI = async function (eci) {
try {
await db.removeChannelYieldable(eci)
t.fail('Should error')
} catch (err) {
t.is(err + '', "Error: Cannot delete the pico's admin channel")
}
}
await db.newPicoYieldable({})
await assertECIs('id0', ['id1'])
assertFailRemoveECI("id1"),
assertECIs("id0", ["id1", "id2"]),
await db.newChannelYieldable({pico_id: 'id0', name: 'two', type: 't'})
await assertECIs('id0', ['id1', 'id2'])
async.apply(db.removeChannel, "id2"),
assertECIs("id0", ["id1"]),
await assertFailRemoveECI('id1')
await assertECIs('id0', ['id1', 'id2'])
assertFailRemoveECI("id1"),
assertECIs("id0", ["id1"]),
await db.removeChannelYieldable('id2')
await assertECIs('id0', ['id1'])
async.apply(db.newPico, {parent_id: "id0"}),
assertECIs("id3", ["id4"]),
assertFailRemoveECI("id4"),
assertECIs("id3", ["id4"]),
await assertFailRemoveECI('id1')
await assertECIs('id0', ['id1'])
], t.end);
});
await db.newPicoYieldable({parent_id: 'id0'})
await assertECIs('id3', ['id4'])
await assertFailRemoveECI('id4')
await assertECIs('id3', ['id4'])
})
test("DB - persistent variables", function(t){
var db = mkTestDB();
testA('DB - persistent variables', async function (t) {
var db = mkTestDB()
cocb.run(function*(){
var putEntVar = _.partial(cocb.wrap(db.putEntVar), "p", "r");
var getEntVar = _.partial(cocb.wrap(db.getEntVar), "p", "r");
var delEntVar = _.partial(cocb.wrap(db.delEntVar), "p", "r");
var toObj = cocb.wrap(db.toObj);
var put = _.partial(db.putEntVarYieldable, 'p', 'r')
var get = _.partial(db.getEntVarYieldable, 'p', 'r')
var del = _.partial(db.delEntVarYieldable, 'p', 'r')
var toObj = db.toObjYieldable
var data;
var data
yield putEntVar("foo", null, [1, 2]);
data = yield getEntVar("foo", null);
t.deepEquals(data, [1, 2]);
t.ok(ktypes.isArray(data));
await put('foo', null, [1, 2])
data = await get('foo', null)
t.deepEquals(data, [1, 2])
t.ok(ktypes.isArray(data))
yield putEntVar("foo", null, {a: 3, b: 4});
data = yield getEntVar("foo", null);
t.deepEquals(data, {a: 3, b: 4});
t.ok(ktypes.isMap(data));
await put('foo', null, {a: 3, b: 4})
data = await get('foo', null)
t.deepEquals(data, {a: 3, b: 4})
t.ok(ktypes.isMap(data))
yield delEntVar("foo", null);
data = yield getEntVar("foo", null);
t.deepEquals(data, void 0);
await del('foo', null)
data = await get('foo', null)
t.deepEquals(data, void 0)
yield putEntVar("foo", null, {one: 11, two: 22});
data = yield getEntVar("foo", null);
t.deepEquals(data, {one: 11, two: 22});
yield putEntVar("foo", null, {one: 11});
data = yield getEntVar("foo", null);
t.deepEquals(data, {one: 11});
await put('foo', null, {one: 11, two: 22})
data = await get('foo', null)
t.deepEquals(data, {one: 11, two: 22})
await put('foo', null, {one: 11})
data = await get('foo', null)
t.deepEquals(data, {one: 11})
data = yield getEntVar("foo", "one");
t.deepEquals(data, 11);
data = await get('foo', 'one')
t.deepEquals(data, 11)
yield putEntVar("foo", ["bar", "baz"], {qux: 1});
data = yield getEntVar("foo", null);
t.deepEquals(data, {one: 11, bar: {baz: {qux: 1}}});
await put('foo', ['bar', 'baz'], {qux: 1})
data = await get('foo', null)
t.deepEquals(data, {one: 11, bar: {baz: {qux: 1}}})
yield putEntVar("foo", ["bar", "asdf"], true);
data = yield getEntVar("foo", null);
t.deepEquals(data, {one: 11, bar: {
baz: {qux: 1},
asdf: true,
}});
await put('foo', ['bar', 'asdf'], true)
data = await get('foo', null)
t.deepEquals(data, {one: 11,
bar: {
baz: {qux: 1},
asdf: true
}})
yield putEntVar("foo", ["bar", "baz", "qux"], "wat?");
data = yield getEntVar("foo", null);
t.deepEquals(data, {one: 11, bar: {
baz: {qux: "wat?"},
asdf: true,
}});
data = yield getEntVar("foo", ["bar", "baz", "qux"]);
t.deepEquals(data, "wat?");
await put('foo', ['bar', 'baz', 'qux'], 'wat?')
data = await get('foo', null)
t.deepEquals(data, {one: 11,
bar: {
baz: {qux: 'wat?'},
asdf: true
}})
data = await get('foo', ['bar', 'baz', 'qux'])
t.deepEquals(data, 'wat?')
await del('foo', 'one')
data = await get('foo', null)
t.deepEquals(data, {bar: {baz: {qux: 'wat?'}, asdf: true}})
yield delEntVar("foo", "one");
data = yield getEntVar("foo", null);
t.deepEquals(data, {bar: {baz: {qux: "wat?"}, asdf: true}});
await del('foo', ['bar', 'asdf'])
data = await get('foo', null)
t.deepEquals(data, {bar: {baz: {qux: 'wat?'}}})
yield delEntVar("foo", ["bar", "asdf"]);
data = yield getEntVar("foo", null);
t.deepEquals(data, {bar: {baz: {qux: "wat?"}}});
await del('foo', ['bar', 'baz', 'qux'])
data = await get('foo', null)
t.deepEquals(data, {})
yield delEntVar("foo", ["bar", "baz", "qux"]);
data = yield getEntVar("foo", null);
t.deepEquals(data, {});
/// ////////////////////////////////////////////////////////////////////
// how other types are encoded
var action = function () {}
action.is_an_action = true
await put('act', null, action)
await put('fn', null, _.noop)
await put('nan', null, NaN)
///////////////////////////////////////////////////////////////////////
// how other types are encoded
var action = function(){};
action.is_an_action = true;
yield putEntVar("act", null, action);
yield putEntVar("fn", null, _.noop);
yield putEntVar("nan", null, NaN);
var dump = await toObj()
var dump = yield toObj();
t.equals(await get('fn', null), '[Function]')
t.deepEquals(dump.entvars.p.r.fn, {
type: 'String',
value: '[Function]'
})
t.equals(yield getEntVar("fn", null), "[Function]");
t.deepEquals(dump.entvars.p.r.fn, {
type: "String",
value: "[Function]",
});
t.equals(await get('act', null), '[Action]')
t.deepEquals(dump.entvars.p.r.act, {
type: 'String',
value: '[Action]'
})
t.equals(yield getEntVar("act", null), "[Action]");
t.deepEquals(dump.entvars.p.r.act, {
type: "String",
value: "[Action]",
});
t.equals(await get('nan', null), null)
t.deepEquals(dump.entvars.p.r.nan, {
type: 'Null',
value: null
})
})
t.equals(yield getEntVar("nan", null), null);
t.deepEquals(dump.entvars.p.r.nan, {
type: "Null",
value: null,
});
testA('DB - persistent variables array/map', async function (t) {
var db = mkTestDB()
var put = _.partial(db.putEntVarYieldable, 'p', 'r')
var get = _.partial(db.getEntVarYieldable, 'p', 'r')
var del = _.partial(db.delEntVarYieldable, 'p', 'r')
var toObj = db.toObjYieldable
var toJson = JSON.stringify
}, t.end);
});
var tst = async function (name, type, value, msg) {
var val = toJson(value)
t.equals(toJson((await toObj()).entvars.p.r[name]), '{"type":"' + type + '","value":' + val + '}', msg)
t.equals(toJson(await get(name, null)), val, msg)
}
test("DB - persistent variables array/map", function(t){
var db = mkTestDB();
cocb.run(function*(){
var put = _.partial(cocb.wrap(db.putEntVar), "p", "r");
var get = _.partial(cocb.wrap(db.getEntVar), "p", "r");
var del = _.partial(cocb.wrap(db.delEntVar), "p", "r");
var toObj = cocb.wrap(db.toObj);
var toJson = JSON.stringify;
await put('foo', [0], 'aaa')
await put('foo', [1], 'bbb')
await tst('foo', 'Array', ['aaa', 'bbb'], '`foo` is infered to be an array based on the int index')
var tst = function*(name, type, value, msg){
var val = toJson(value);
t.equals(toJson((yield toObj()).entvars.p.r[name]), "{\"type\":\""+type+"\",\"value\":"+val+"}", msg);
t.equals(toJson(yield get(name)), val, msg);
};
// Now should change to a map b/c the key is not an int index
await put('foo', ['wat'], 'da')
await tst('foo', 'Map', {0: 'aaa', 1: 'bbb', wat: 'da'}, '`foo` is now a map')
yield put("foo", [0], "aaa");
yield put("foo", [1], "bbb");
yield tst("foo", "Array", ["aaa", "bbb"], "`foo` is infered to be an array based on the int index");
// once a map, always a map
await del('foo', ['wat'])
await tst('foo', 'Map', {0: 'aaa', 1: 'bbb'}, '`foo` is still a map')
await put('foo', [2], 'ccc')
await tst('foo', 'Map', {0: 'aaa', 1: 'bbb', 2: 'ccc'}, '`foo` is still a map')
// Now should change to a map b/c the key is not an int index
yield put("foo", ["wat"], "da");
yield tst("foo", "Map", {0: "aaa", 1: "bbb", wat: "da"}, "`foo` is now a map");
// infered as map if it's a string
await put('bar', ['0'], 'aaa')
await tst('bar', 'Map', {0: 'aaa'}, '`bar` is a map since the first key was a string')
// once a map, always a map
yield del("foo", ["wat"]);
yield tst("foo", "Map", {0: "aaa", 1: "bbb"}, "`foo` is still a map");
yield put("foo", [2], "ccc");
yield tst("foo", "Map", {0: "aaa", 1: "bbb", 2: "ccc"}, "`foo` is still a map");
// infered as an Array b/c the key is a positive integer
await put('baz', [2], 'ccc')
await tst('baz', 'Array', [null, null, 'ccc'], '`baz` is an Array')
// infered as map if it's a string
yield put("bar", ["0"], "aaa");
yield tst("bar", "Map", {0: "aaa"}, "`bar` is a map since the first key was a string");
// now it's a map b/c the key is a string
await put('baz', ['1'], 'bbb')
await tst('baz', 'Map', {1: 'bbb', 2: 'ccc'}, '`baz` is now a Map')
// infered as an Array b/c the key is a positive integer
yield put("baz", [2], "ccc");
yield tst("baz", "Array", [null, null, "ccc"], "`baz` is an Array");
// now it's a map b/c the key is a string
yield put("baz", ["1"], "bbb");
yield tst("baz", "Map", {1: "bbb", 2: "ccc"}, "`baz` is now a Map");
// initialzed as array should db dump as an array
yield put("qux", null, ["aaa"]);
yield tst("qux", "Array", ["aaa"], "`qux` is an Array");
}, t.end);
});
// initialzed as array should db dump as an array
await put('qux', null, ['aaa'])
await tst('qux', 'Array', ['aaa'], '`qux` is an Array')
})

@@ -1,30 +0,30 @@

var _ = require("lodash");
var _ = require('lodash')
module.exports = function(ldb, opts, onData, callback_orig){
var has_calledback = false;
var callback = function(){
if(has_calledback) return;
has_calledback = true;
callback_orig.apply(null, arguments);
};
module.exports = function (ldb, opts, onData, callbackOrig) {
var hasCalledback = false
var callback = function () {
if (hasCalledback) return
hasCalledback = true
callbackOrig.apply(null, arguments)
}
if(_.has(opts, "prefix")){
opts = _.assign({}, opts, {
gte: opts.prefix,
lte: opts.prefix.concat([undefined])//bytewise sorts with null at the bottom and undefined at the top
});
delete opts.prefix;
}
var s = ldb.createReadStream(opts);
var stopRange = function(){
s.destroy();
callback();
};
s.on("error", function(err){
callback(err);
});
s.on("end", callback);
s.on("data", function(data){
onData(data, stopRange);
});
};
if (_.has(opts, 'prefix')) {
opts = _.assign({}, opts, {
gte: opts.prefix,
lte: opts.prefix.concat([undefined])// bytewise sorts with null at the bottom and undefined at the top
})
delete opts.prefix
}
var s = ldb.createReadStream(opts)
var stopRange = function () {
s.destroy()
callback()
}
s.on('error', function (err) {
callback(err)
})
s.on('end', callback)
s.on('data', function (data) {
onData(data, stopRange)
})
}

@@ -1,14 +0,14 @@

var _ = require("lodash");
var commentsRegExp = require("comment-regex");
var _ = require('lodash')
var commentsRegExp = require('comment-regex')
module.exports = function(src){
if(!_.isString(src)){
return;
}
var src_no_comments = src.replace(commentsRegExp(), " ");
var m = /^\s*ruleset\s+([^\s{]+)/.exec(src_no_comments);
if(!m){
return;
}
return m[1];
};
module.exports = function (src) {
if (!_.isString(src)) {
return
}
var srcNoComments = src.replace(commentsRegExp(), ' ')
var m = /^\s*ruleset\s+([^\s{]+)/.exec(srcNoComments)
if (!m) {
return
}
return m[1]
}

@@ -1,13 +0,13 @@

var fn = require("./extractRulesetID");
var test = require("tape");
var fn = require('./extractRulesetID')
var test = require('tape')
test("extractRulesetID", function(t){
t.equals(fn(""), undefined);
t.equals(fn(" "), undefined);
t.equals(fn("/* ruleset not {} */ ruleset blah.ok.bye "), "blah.ok.bye");
t.equals(fn("ruleset\n\tio.picolabs.cool-rs{}"), "io.picolabs.cool-rs");
t.equals(fn("rulesetok{}"), undefined);
t.equals(fn(null), undefined);
t.equals(fn(), undefined);
t.end();
});
test('extractRulesetID', function (t) {
t.equals(fn(''), undefined)
t.equals(fn(' '), undefined)
t.equals(fn('/* ruleset not {} */ ruleset blah.ok.bye '), 'blah.ok.bye')
t.equals(fn('ruleset\n\tio.picolabs.cool-rs{}'), 'io.picolabs.cool-rs')
t.equals(fn('rulesetok{}'), undefined)
t.equals(fn(null), undefined)
t.equals(fn(), undefined)
t.end()
})

@@ -1,27 +0,27 @@

var fs = require("fs");
var urllib = require("url");
var request = require("request");
var fs = require('fs')
var urllib = require('url')
var request = require('request')
var httpGetKRL = function(url, callback){
request(url, function(err, resp, body){
if(err){
return callback(err);
}
if(resp.statusCode !== 200){
return callback(new Error("Got a statusCode=" + resp.statusCode + " for: " + url));
}
callback(null, body);
});
};
var httpGetKRL = function (url, callback) {
request(url, function (err, resp, body) {
if (err) {
return callback(err)
}
if (resp.statusCode !== 200) {
return callback(new Error('Got a statusCode=' + resp.statusCode + ' for: ' + url))
}
callback(null, body)
})
}
module.exports = function(url, callback){
var url_parsed = urllib.parse(url);
if(url_parsed.protocol === "file:"){
fs.readFile(decodeURI(url_parsed.path), function(err, data){
if(err) return callback(err);
callback(null, data.toString());
});
return;
}
httpGetKRL(url, callback);
};
module.exports = function (url, callback) {
var urlParsed = urllib.parse(url)
if (urlParsed.protocol === 'file:') {
fs.readFile(decodeURI(urlParsed.path), function (err, data) {
if (err) return callback(err)
callback(null, data.toString())
})
return
}
httpGetKRL(url, callback)
}

@@ -1,689 +0,696 @@

var _ = require("lodash");
var DB = require("./DB");
var cocb = require("co-callback");
var cuid = require("cuid");
var async = require("async");
var ktypes = require("krl-stdlib/types");
var runKRL = require("./runKRL");
var Modules = require("./modules");
var DepGraph = require("dependency-graph").DepGraph;
var PicoQueue = require("./PicoQueue");
var Scheduler = require("./Scheduler");
var runAction = require("./runAction");
var cleanEvent = require("./cleanEvent");
var cleanQuery = require("./cleanQuery");
var krl_stdlib = require("krl-stdlib");
var getKRLByURL = require("./getKRLByURL");
var SymbolTable = require("symbol-table");
var EventEmitter = require("events");
var processEvent = require("./processEvent");
var processQuery = require("./processQuery");
var ChannelPolicy = require("./ChannelPolicy");
var RulesetRegistry = require("./RulesetRegistry");
var normalizeKRLArgs = require("./normalizeKRLArgs");
var _ = require('lodash')
var DB = require('./DB')
var util = require('util')
var cuid = require('cuid')
var async = require('async')
var ktypes = require('krl-stdlib/types')
var runKRL = require('./runKRL')
var Modules = require('./modules')
var DepGraph = require('dependency-graph').DepGraph
var PicoQueue = require('./PicoQueue')
var Scheduler = require('./Scheduler')
var runAction = require('./runAction')
var cleanEvent = require('./cleanEvent')
var cleanQuery = require('./cleanQuery')
var krlStdlib = require('krl-stdlib')
var getKRLByURL = require('./getKRLByURL')
var SymbolTable = require('symbol-table')
var EventEmitter = require('events')
var processEvent = require('./processEvent')
var processQuery = require('./processQuery')
var ChannelPolicy = require('./ChannelPolicy')
var RulesetRegistry = require('./RulesetRegistry')
var normalizeKRLArgs = require('./normalizeKRLArgs')
var applyFn = cocb.wrap(function*(fn, ctx, args){
if(ktypes.isAction(fn)){
throw new Error("actions can only be called in the rule action block");
}
if( ! ktypes.isFunction(fn)){
throw new Error("Not a function");
}
return yield fn(ctx, args);
});
function promiseCallback (callback) {
if (!callback) {
var promise = new Promise(function (resolve, reject) {
callback = function callback (err, value) {
err ? reject(err) : resolve(value)
}
})
callback.promise = promise
}
return callback
}
var log_levels = {
"info": true,
"debug": true,
"warn": true,
"error": true,
};
var applyFn = function (fn, ctx, args) {
if (ktypes.isAction(fn)) {
throw new Error('actions can only be called in the rule action block')
}
if (!ktypes.isFunction(fn)) {
throw new Error('Not a function')
}
return fn(ctx, args)
}
var krl_stdlib_wrapped = _.mapValues(krl_stdlib, function(fn, key){
if(cocb.isGeneratorFunction(fn)){
return cocb.wrap(fn);
var logLevels = {
'info': true,
'debug': true,
'warn': true,
'error': true
}
module.exports = function (conf) {
var db = DB(conf.db)
_.each(db, function (val, key) {
if (_.isFunction(val)) {
db[key + 'Yieldable'] = util.promisify(val)
}
return function(){
return Promise.resolve(fn.apply(void 0, arguments));
};
});
})
var host = conf.host
var rootRIDs = _.uniq(_.filter(conf.rootRIDs, _.isString))
var compileAndLoadRuleset = conf.compileAndLoadRuleset
var compileAndLoadRulesetYieldable = util.promisify(compileAndLoadRuleset)
module.exports = function(conf){
var db = DB(conf.db);
_.each(db, function(val, key){
if(_.isFunction(val)){
db[key + "Yieldable"] = cocb.wrap(val);
}
});
var host = conf.host;
var rootRIDs = _.uniq(_.filter(conf.rootRIDs, _.isString));
var compileAndLoadRuleset = conf.compileAndLoadRuleset;
var compileAndLoadRulesetYieldable = cocb.wrap(compileAndLoadRuleset);
var depGraph = new DepGraph()
var depGraph = new DepGraph();
var core = {
db: db,
host: host,
rsreg: RulesetRegistry()
}
var core = {
db: db,
host: host,
rsreg: RulesetRegistry()
};
var emitter = new EventEmitter()
var modules = Modules(core, conf.modules)
var emitter = new EventEmitter();
var modules = Modules(core, conf.modules);
var mkCTX = function (ctx) {
ctx.getMyKey = (function (rid) {
// we do it this way so all the keys are not leaked out to other built in modules or rulesets
return function (id) {
return core.rsreg.getKey(rid, id)
}
}(ctx.rid))// pass in the rid at mkCTX creation so it is not later mutated
var mkCTX = function(ctx){
ctx.getMyKey = (function(rid){
//we do it this way so all the keys are not leaked out to other built in modules or rulesets
return function(id){
return core.rsreg.getKey(rid, id);
};
}(ctx.rid));//pass in the rid at mkCTX creation so it is not later mutated
if (ctx.event) {
ctx.txn_id = ctx.event.txn_id
}
if (ctx.query) {
ctx.txn_id = ctx.query.txn_id
}
if(ctx.event){
ctx.txn_id = ctx.event.txn_id;
ctx.modules = modules
ctx.applyFn = applyFn
var pushCTXScope = function (ctx2) {
return mkCTX(_.assign({}, ctx2, {
rid: ctx.rid, // keep your original rid
scope: ctx.scope.push()
}))
}
ctx.mkFunction = function (paramOrder, fn) {
var fixArgs = _.partial(normalizeKRLArgs, paramOrder)
return function (ctx2, args) {
return fn(pushCTXScope(ctx2), fixArgs(args))
}
}
ctx.mkAction = function (paramOrder, fn) {
var fixArgs = _.partial(normalizeKRLArgs, paramOrder)
var actionFn = function (ctx2, args) {
return fn(pushCTXScope(ctx2), fixArgs(args), runAction)
}
actionFn.is_an_action = true
return actionFn
}
ctx.emit = function (type, val) {
var info = {}
info.rid = ctx.rid
info.txn_id = ctx.txn_id
if (ctx.pico_id) {
info.pico_id = ctx.pico_id
}
if (ctx.event) {
info.event = {
eci: ctx.event.eci,
eid: ctx.event.eid,
domain: ctx.event.domain,
type: ctx.event.type,
attrs: _.cloneDeep(ctx.event.attrs)
}
if(ctx.query){
ctx.txn_id = ctx.query.txn_id;
if (!info.eci) {
info.eci = ctx.event.eci
}
ctx.modules = modules;
ctx.applyFn = applyFn;
var pushCTXScope = function(ctx2){
return mkCTX(_.assign({}, ctx2, {
rid: ctx.rid,//keep your original rid
scope: ctx.scope.push(),
}));
};
ctx.mkFunction = function(param_order, fn){
var fixArgs = _.partial(normalizeKRLArgs, param_order);
var pfn = cocb.wrap(fn);
return function(ctx2, args){
return pfn(pushCTXScope(ctx2), fixArgs(args));
};
};
ctx.mkAction = function(param_order, fn){
var fixArgs = _.partial(normalizeKRLArgs, param_order);
var pfn = cocb.wrap(fn);
var actionFn = function(ctx2, args){
return pfn(pushCTXScope(ctx2), fixArgs(args), runAction);
};
actionFn.is_an_action = true;
return actionFn;
};
ctx.emit = function(type, val){
var info = {};
info.rid = ctx.rid;
info.txn_id = ctx.txn_id;
if(ctx.pico_id){
info.pico_id = ctx.pico_id;
}
if(ctx.event){
info.event = {
eci: ctx.event.eci,
eid: ctx.event.eid,
domain: ctx.event.domain,
type: ctx.event.type,
attrs: _.cloneDeep(ctx.event.attrs),
};
if(!info.eci){
info.eci = ctx.event.eci;
}
}
if(ctx.query){
info.query = {
eci: ctx.query.eci,
rid: ctx.query.rid,
name: ctx.query.name,
args: ctx.query.args
};
if(!info.rid){
info.rid = ctx.query.rid;
}
if(!info.eci){
info.eci = ctx.query.eci;
}
}
//one reason `val` must come first is by convertion the "error"
//event's first argument is the Error object. If `info` comes first
//it will get confused thinking `info` is the error
emitter.emit(type, val, info);
};
ctx.log = function(level, val){
if(!_.has(log_levels, level)){
throw new Error("Unsupported log level: " + level);
}
//this 'log-' prefix distinguishes user declared log events from other system generated events
ctx.emit("log-" + level, val);
};
ctx.callKRLstdlib = function(fn_name, args){
if(_.isArray(args)){
args = [ctx].concat(args);
}else{
args[0] = ctx;
}
var fn = krl_stdlib_wrapped[fn_name];
if(fn===void 0){
throw new Error("Not an operator: " + fn_name);
}
return fn.apply(void 0, args);
};
//don't allow anyone to mutate ctx on the fly
Object.freeze(ctx);
return ctx;
};
core.mkCTX = mkCTX;
var initializeRulest = cocb.wrap(function*(rs){
rs.scope = SymbolTable();
rs.modules_used = {};
core.rsreg.setupOwnKeys(rs);
var use_array = _.values(rs.meta && rs.meta.use);
var i, use, dep_rs, ctx2;
for(i = 0; i < use_array.length; i++){
use = use_array[i];
if(use.kind !== "module"){
throw new Error("Unsupported 'use' kind: " + use.kind);
}
dep_rs = core.rsreg.get(use.rid);
if(!dep_rs){
throw new Error("Dependant module not loaded: " + use.rid);
}
ctx2 = mkCTX({
rid: dep_rs.rid,
scope: SymbolTable()
});
if(_.isFunction(dep_rs.meta && dep_rs.meta.configure)){
yield runKRL(dep_rs.meta.configure, ctx2);
}
if(_.isFunction(use["with"])){
yield runKRL(use["with"], mkCTX({
rid: rs.rid,//switch rid
scope: ctx2.scope//must share scope
}));
}
if(_.isFunction(dep_rs.global)){
yield runKRL(dep_rs.global, ctx2);
}
rs.modules_used[use.alias] = {
rid: use.rid,
scope: ctx2.scope,
provides: _.get(dep_rs, ["meta", "provides"], []),
};
core.rsreg.provideKey(rs.rid, use.rid);
}
if (ctx.query) {
info.query = {
eci: ctx.query.eci,
rid: ctx.query.rid,
name: ctx.query.name,
args: ctx.query.args
}
var ctx = mkCTX({
rid: rs.rid,
scope: rs.scope
});
if(_.isFunction(rs.meta && rs.meta.configure)){
yield runKRL(rs.meta.configure, ctx);
if (!info.rid) {
info.rid = ctx.query.rid
}
core.rsreg.put(rs);
if(_.isFunction(rs.global)){
yield runKRL(rs.global, ctx);
if (!info.eci) {
info.eci = ctx.query.eci
}
});
}
// one reason `val` must come first is by convertion the "error"
// event's first argument is the Error object. If `info` comes first
// it will get confused thinking `info` is the error
emitter.emit(type, val, info)
}
ctx.log = function (level, val) {
if (!_.has(logLevels, level)) {
throw new Error('Unsupported log level: ' + level)
}
// this 'log-' prefix distinguishes user declared log events from other system generated events
ctx.emit('log-' + level, val)
}
ctx.callKRLstdlib = function (fnName, args) {
if (_.isArray(args)) {
args = [ctx].concat(args)
} else {
args[0] = ctx
}
var fn = krlStdlib[fnName]
if (fn === void 0) {
throw new Error('Not an operator: ' + fnName)
}
return Promise.resolve(fn.apply(void 0, args))
}
// don't allow anyone to mutate ctx on the fly
Object.freeze(ctx)
return ctx
}
core.mkCTX = mkCTX
core.registerRuleset = function(krl_src, meta_data, callback){
cocb.run(function*(){
var data = yield db.storeRulesetYieldable(krl_src, meta_data);
var rid = data.rid;
var hash = data.hash;
var initializeRulest = async function (rs) {
rs.scope = SymbolTable()
rs.modules_used = {}
core.rsreg.setupOwnKeys(rs)
var rs = yield compileAndLoadRulesetYieldable({
rid: rid,
src: krl_src,
hash: hash
});
var useArray = _.values(rs.meta && rs.meta.use)
var i, use, depRs, ctx2
for (i = 0; i < useArray.length; i++) {
use = useArray[i]
if (use.kind !== 'module') {
throw new Error("Unsupported 'use' kind: " + use.kind)
}
depRs = core.rsreg.get(use.rid)
if (!depRs) {
throw new Error('Dependant module not loaded: ' + use.rid)
}
ctx2 = mkCTX({
rid: depRs.rid,
scope: SymbolTable()
})
if (_.isFunction(depRs.meta && depRs.meta.configure)) {
await runKRL(depRs.meta.configure, ctx2)
}
if (_.isFunction(use['with'])) {
await runKRL(use['with'], mkCTX({
rid: rs.rid, // switch rid
scope: ctx2.scope// must share scope
}))
}
if (_.isFunction(depRs.global)) {
await runKRL(depRs.global, ctx2)
}
rs.modules_used[use.alias] = {
rid: use.rid,
scope: ctx2.scope,
provides: _.get(depRs, ['meta', 'provides'], [])
}
core.rsreg.provideKey(rs.rid, use.rid)
}
var ctx = mkCTX({
rid: rs.rid,
scope: rs.scope
})
if (_.isFunction(rs.meta && rs.meta.configure)) {
await runKRL(rs.meta.configure, ctx)
}
core.rsreg.put(rs)
if (_.isFunction(rs.global)) {
await runKRL(rs.global, ctx)
}
}
if(depGraph.hasNode(rs.rid)){
// cleanup any left over dependencies with rid
_.each(depGraph.dependenciesOf(rs.rid), function(rid){
depGraph.removeDependency(rs.rid, rid);
});
}else{
depGraph.addNode(rs.rid);
}
core.registerRuleset = function (krlSrc, metaData, callback) {
(async function () {
var data = await db.storeRulesetYieldable(krlSrc, metaData)
var rid = data.rid
var hash = data.hash
try{
_.each(rs.meta && rs.meta.use, function(use){
if(use.kind === "module"){
try{
depGraph.addDependency(rs.rid, use.rid);
}catch(e){
throw new Error("Dependant module not loaded: " + use.rid);
}
}
});
var rs = await compileAndLoadRulesetYieldable({
rid: rid,
src: krlSrc,
hash: hash
})
// check for dependency cycles
depGraph.overallOrder();// this will throw if there is a cycle
if (depGraph.hasNode(rs.rid)) {
// cleanup any left over dependencies with rid
_.each(depGraph.dependenciesOf(rs.rid), function (rid) {
depGraph.removeDependency(rs.rid, rid)
})
} else {
depGraph.addNode(rs.rid)
}
// Now enable and initialize it
yield db.enableRulesetYieldable(hash);
yield initializeRulest(rs);
}catch(err){
core.rsreg.del(rs.rid);
depGraph.removeNode(rs.rid);
db.disableRuleset(rs.rid, _.noop);//undo enable if failed
throw err;
try {
_.each(rs.meta && rs.meta.use, function (use) {
if (use.kind === 'module') {
try {
depGraph.addDependency(rs.rid, use.rid)
} catch (e) {
throw new Error('Dependant module not loaded: ' + use.rid)
}
}
})
return {
rid: rs.rid,
hash: hash
};
}, function(err, data){
process.nextTick(function(){
//wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
//when infact we are handling the rejection
callback(err, data);
});
});
};
// check for dependency cycles
depGraph.overallOrder()// this will throw if there is a cycle
var picoQ = PicoQueue(function(pico_id, type, data, callback){
//now handle the next task on the pico queue
if(type === "event"){
var event = data;
event.timestamp = new Date(event.timestamp);//convert from JSON string to date
processEvent(core, mkCTX({
event: event,
pico_id: pico_id
}), callback);
}else if(type === "query"){
processQuery(core, mkCTX({
query: data,
pico_id: pico_id
}), callback);
}else{
callback(new Error("invalid PicoQueue type:" + type));
}
});
// Now enable and initialize it
await db.enableRulesetYieldable(hash)
await initializeRulest(rs)
} catch (err) {
core.rsreg.del(rs.rid)
depGraph.removeNode(rs.rid)
db.disableRuleset(rs.rid, _.noop)// undo enable if failed
throw err
}
var picoTask = function(type, data_orig, callback_orig){
var callback = _.isFunction(callback_orig) ? callback_orig : _.noop;
var data;
try{
//validate + normalize event/query, and make sure is not mutated
if(type === "event"){
data = cleanEvent(data_orig);
if(data.eid === "none"){
data.eid = cuid();
}
}else if(type === "query"){
data = cleanQuery(data_orig);
}else{
throw new Error("invalid PicoQueue type:" + type);
}
}catch(err){
emitter.emit("error", err);
callback(err);
return;
}
return {
rid: rs.rid,
hash: hash
}
}())
.then(function (data) {
callback(null, data)
})
.catch(function (err) {
process.nextTick(function () {
// wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
// when infact we are handling the rejection
callback(err)
})
})
}
//events and queries have a txn_id and timestamp
data.txn_id = cuid();
data.timestamp = conf.___core_testing_mode && _.isDate(data_orig.timestamp)
? data_orig.timestamp
: new Date();
var picoQ = PicoQueue(function (picoId, type, data) {
// now handle the next task on the pico queue
if (type === 'event') {
var event = data
event.timestamp = new Date(event.timestamp)// convert from JSON string to date
return processEvent(core, mkCTX({
event: event,
pico_id: picoId
}))
} else if (type === 'query') {
return processQuery(core, mkCTX({
query: data,
pico_id: picoId
}))
} else {
throw new Error('invalid PicoQueue type:' + type)
}
})
db.getChannelAndPolicy(data.eci, function(err, chann){
if(err){
emitter.emit("error", err);
callback(err);
return;
}
var picoTask = function (type, dataOrig, callbackOrig) {
var callback = _.isFunction(callbackOrig) ? callbackOrig : _.noop
var data
try {
// validate + normalize event/query, and make sure is not mutated
if (type === 'event') {
data = cleanEvent(dataOrig)
if (data.eid === 'none') {
data.eid = cuid()
}
} else if (type === 'query') {
data = cleanQuery(dataOrig)
} else {
throw new Error('invalid PicoQueue type:' + type)
}
} catch (err) {
emitter.emit('error', err)
callback(err)
return
}
var pico_id = chann.pico_id;
// events and queries have a txn_id and timestamp
data.txn_id = cuid()
data.timestamp = conf.___core_testing_mode && _.isDate(dataOrig.timestamp)
? dataOrig.timestamp
: new Date()
var emit = mkCTX({
pico_id: pico_id,
event: type === "event" ? data : void 0,
query: type === "query" ? data : void 0,
}).emit;
db.getChannelAndPolicy(data.eci, function (err, chann) {
if (err) {
emitter.emit('error', err)
callback(err)
return
}
emit("episode_start");
if(type === "event"){
emit("debug", "event received: " + data.domain + "/" + data.type);
}else if(type === "query"){
emit("debug", "query received: " + data.rid + "/" + data.name);
}
try{
ChannelPolicy.assert(chann.policy, type, data);
}catch(e){
onDone(e);
return;
}
var picoId = chann.pico_id
picoQ.enqueue(pico_id, type, data, onDone);
var emit = mkCTX({
pico_id: picoId,
event: type === 'event' ? data : void 0,
query: type === 'query' ? data : void 0
}).emit
emit("debug", type + " added to pico queue: " + pico_id);
emit('episode_start')
if (type === 'event') {
emit('debug', 'event received: ' + data.domain + '/' + data.type)
} else if (type === 'query') {
emit('debug', 'query received: ' + data.rid + '/' + data.name)
}
try {
ChannelPolicy.assert(chann.policy, type, data)
} catch (e) {
onDone(e)
return
}
function onDone(err, data){
if(err){
emit("error", err);
}else{
emit("debug", data);
}
//there should be no more emits after "episode_stop"
emit("episode_stop");
callback(err, data);
}
});
};
picoQ.enqueue(picoId, type, data, onDone)
core.signalEvent = function(event, callback){
picoTask("event", event, callback);
};
emit('debug', type + ' added to pico queue: ' + picoId)
core.runQuery = function(query, callback){
picoTask("query", query, callback);
};
function onDone (err, data) {
if (err) {
emit('error', err)
} else {
emit('debug', data)
}
// there should be no more emits after "episode_stop"
emit('episode_stop')
callback(err, data)
}
})
}
var registerAllEnabledRulesets = function(callback){
core.signalEvent = function (event, callback) {
callback = promiseCallback(callback)
picoTask('event', event, callback)
return callback.promise
}
var rs_by_rid = {};
core.runQuery = function (query, callback) {
callback = promiseCallback(callback)
picoTask('query', query, callback)
return callback.promise
}
async.series([
//
// load Rulesets and track dependencies
//
function(nextStep){
var onRID = function(rid, next){
db.getEnabledRuleset(rid, function(err, data){
if(err) return next(err);
compileAndLoadRuleset({
rid: rid,
src: data.src,
hash: data.hash
}, function(err, rs){
if(err){
//Emit an error and don't halt the engine
var err2 = new Error("Failed to compile " + rid + "! It is now disabled. You'll need to edit and re-register it.\nCause: " + err);
err2.orig_error = err;
emitter.emit("error", err2, {rid: rid});
//disable the ruleset since it's broken
db.disableRuleset(rid, next);
return;
}
rs_by_rid[rs.rid] = rs;
depGraph.addNode(rs.rid);
next();
});
});
};
db.listAllEnabledRIDs(function(err, rids){
if(err) return nextStep(err);
async.each(rids, onRID, nextStep);
});
},
var registerAllEnabledRulesets = function (callback) {
var rsByRid = {}
async.series([
//
// load Rulesets and track dependencies
//
function (nextStep) {
var onRID = function (rid, next) {
db.getEnabledRuleset(rid, function (err, data) {
if (err) return next(err)
compileAndLoadRuleset({
rid: rid,
src: data.src,
hash: data.hash
}, function (err, rs) {
if (err) {
// Emit an error and don't halt the engine
var err2 = new Error('Failed to compile ' + rid + "! It is now disabled. You'll need to edit and re-register it.\nCause: " + err)
err2.orig_error = err
emitter.emit('error', err2, {rid: rid})
// disable the ruleset since it's broken
db.disableRuleset(rid, next)
return
}
rsByRid[rs.rid] = rs
depGraph.addNode(rs.rid)
next()
})
})
}
db.listAllEnabledRIDs(function (err, rids) {
if (err) return nextStep(err)
async.each(rids, onRID, nextStep)
})
},
//
// initialize Rulesets according to dependency order
//
function(nextStep){
_.each(rs_by_rid, function(rs){
_.each(rs.meta && rs.meta.use, function(use){
if(use.kind === "module"){
depGraph.addDependency(rs.rid, use.rid);
}
});
});
var getRidOrder = function getRidOrder(){
try{
return depGraph.overallOrder();
}catch(err){
var m = /Dependency Cycle Found: (.*)$/.exec(err + "");
if(!m){
throw err;
}
var cycle_rids = _.uniq(m[1].split(" -> "));
_.each(cycle_rids, function(rid){
// remove the rids from the graph and disable it
depGraph.removeNode(rid);
db.disableRuleset(rid, _.noop);
//
// initialize Rulesets according to dependency order
//
function (nextStep) {
_.each(rsByRid, function (rs) {
_.each(rs.meta && rs.meta.use, function (use) {
if (use.kind === 'module') {
depGraph.addDependency(rs.rid, use.rid)
}
})
})
var getRidOrder = function getRidOrder () {
try {
return depGraph.overallOrder()
} catch (err) {
var m = /Dependency Cycle Found: (.*)$/.exec(err + '')
if (!m) {
throw err
}
var cycleRids = _.uniq(m[1].split(' -> '))
_.each(cycleRids, function (rid) {
// remove the rids from the graph and disable it
depGraph.removeNode(rid)
db.disableRuleset(rid, _.noop)
// Let the user know the rid was disabled
var err2 = new Error("Failed to initialize " + rid + ", it's in a dependency cycle. It is now disabled. You'll need to resolve the cycle then re-register it.\nCause: " + err);
err2.orig_error = err;
emitter.emit("error", err2, {rid: rid});
});
return getRidOrder();
}
};
// order they need to be loaded for dependencies to work
var rid_order = getRidOrder();
// Let the user know the rid was disabled
var err2 = new Error('Failed to initialize ' + rid + ", it's in a dependency cycle. It is now disabled. You'll need to resolve the cycle then re-register it.\nCause: " + err)
err2.orig_error = err
emitter.emit('error', err2, {rid: rid})
})
return getRidOrder()
}
}
// order they need to be loaded for dependencies to work
var ridOrder = getRidOrder()
async.eachSeries(rid_order, function(rid, next){
var rs = rs_by_rid[rid];
async.eachSeries(ridOrder, function (rid, next) {
var rs = rsByRid[rid]
initializeRulest(rs).then(function(){
next();
}, function(err){
process.nextTick(function(){
//wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
//when infact we are handling the rejection
initializeRulest(rs).then(function () {
next()
}, function (err) {
process.nextTick(function () {
// wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
// when infact we are handling the rejection
//Emit an error and don't halt the engine
var err2 = new Error("Failed to initialize " + rid + "! It is now disabled. You'll need to edit and re-register it.\nCause: " + err);
err2.orig_error = err;
emitter.emit("error", err2, {rid: rid});
//disable the ruleset since it's broken
db.disableRuleset(rid, next);
});
});
}, nextStep);
},
], callback);
};
// Emit an error and don't halt the engine
var err2 = new Error('Failed to initialize ' + rid + "! It is now disabled. You'll need to edit and re-register it.\nCause: " + err)
err2.orig_error = err
emitter.emit('error', err2, {rid: rid})
// disable the ruleset since it's broken
db.disableRuleset(rid, next)
})
})
}, nextStep)
}
], callback)
}
core.unregisterRuleset = function(rid, callback){
//first assert rid is not depended on as a module
try{
core.rsreg.assertNoDependants(rid);
}catch(err){
callback(err);
return;
}
db.isRulesetUsed(rid, function(err, is_used){
if(err) return callback(err);
if(is_used){
callback(new Error("Unable to unregister \"" + rid + "\": it is installed on at least one pico"));
return;
}
db.deleteRuleset(rid, function(err){
if(err) return callback(err);
core.unregisterRuleset = function (rid, callback) {
// first assert rid is not depended on as a module
try {
core.rsreg.assertNoDependants(rid)
} catch (err) {
callback(err)
return
}
db.isRulesetUsed(rid, function (err, isUsed) {
if (err) return callback(err)
if (isUsed) {
callback(new Error('Unable to unregister "' + rid + '": it is installed on at least one pico'))
return
}
db.deleteRuleset(rid, function (err) {
if (err) return callback(err)
core.rsreg.del(rid);
core.rsreg.del(rid)
callback();
});
});
};
callback()
})
})
}
core.scheduler = Scheduler({
db: db,
onError: function(err){
var info = {scheduler: true};
emitter.emit("error", err, info);
},
onEvent: function(event){
core.signalEvent(event);
},
is_test_mode: !!conf.___core_testing_mode,
});
core.scheduler = Scheduler({
db: db,
onError: function (err) {
var info = {scheduler: true}
emitter.emit('error', err, info)
},
onEvent: function (event) {
core.signalEvent(event)
},
is_test_mode: !!conf.___core_testing_mode
})
core.registerRulesetURL = function(url, callback){
getKRLByURL(url, function(err, src){
core.registerRuleset(src, {url: url}, callback);
});
};
core.flushRuleset = function(rid, callback){
db.getEnabledRuleset(rid, function(err, rs_data){
if(err) return callback(err);
var url = rs_data.url;
if(!_.isString(url)){
callback(new Error("cannot flush a locally registered ruleset"));
return;
}
core.registerRulesetURL(url, callback);
});
};
core.installRuleset = function(pico_id, rid, callback){
db.assertPicoID(pico_id, function(err, pico_id){
if(err) return callback(err);
core.registerRulesetURL = function (url, callback) {
getKRLByURL(url, function (err, src) {
if (err) return callback(err)
core.registerRuleset(src, {url: url}, callback)
})
}
core.flushRuleset = function (rid, callback) {
db.getEnabledRuleset(rid, function (err, rsData) {
if (err) return callback(err)
var url = rsData.url
if (!_.isString(url)) {
callback(new Error('cannot flush a locally registered ruleset'))
return
}
core.registerRulesetURL(url, callback)
})
}
core.installRuleset = function (picoId, rid, callback) {
db.assertPicoID(picoId, function (err, picoId) {
if (err) return callback(err)
db.hasEnabledRid(rid, function(err, has){
if(err) return callback(err);
if(!has) return callback(new Error("This rid is not found and/or enabled: " + rid));
db.hasEnabledRid(rid, function (err, has) {
if (err) return callback(err)
if (!has) return callback(new Error('This rid is not found and/or enabled: ' + rid))
db.addRulesetToPico(pico_id, rid, callback);
});
});
};
db.addRulesetToPico(picoId, rid, callback)
})
})
}
core.uninstallRuleset = function(pico_id, rid, callback){
db.assertPicoID(pico_id, function(err, pico_id){
if(err) return callback(err);
core.uninstallRuleset = function (picoId, rid, callback) {
db.assertPicoID(picoId, function (err, picoId) {
if (err) return callback(err)
db.removeRulesetFromPico(pico_id, rid, callback);
});
};
db.removeRulesetFromPico(picoId, rid, callback)
})
}
var resumeScheduler = function(callback){
db.listScheduled(function(err, vals){
if(err) return callback(err);
var resumeScheduler = function (callback) {
db.listScheduled(function (err, vals) {
if (err) return callback(err)
//resume the cron jobs
_.each(vals, function(val){
if(!_.isString(val.timespec)){
return;
}
core.scheduler.addCron(val.timespec, val.id, val.event);
});
// resume the cron jobs
_.each(vals, function (val) {
if (!_.isString(val.timespec)) {
return
}
core.scheduler.addCron(val.timespec, val.id, val.event)
})
//resume `schedule .. at` queue
core.scheduler.update();
// resume `schedule .. at` queue
core.scheduler.update()
callback();
});
};
callback()
})
}
var pe = {
emitter: emitter,
var pe = {
emitter: emitter,
signalEvent: core.signalEvent,
runQuery: core.runQuery,
signalEvent: core.signalEvent,
runQuery: core.runQuery,
getRootECI: function (callback) {
db.getRootPico(function (err, rootPico) {
if (err) return callback(err)
callback(null, rootPico.admin_eci)
})
},
getRootECI: function(callback){
db.getRootPico(function(err, root_pico){
if(err) return callback(err);
callback(null, root_pico.admin_eci);
});
},
/// //////////////////
// vvv deprecated vvv
registerRuleset: core.registerRuleset,
registerRulesetURL: core.registerRulesetURL,
flushRuleset: core.flushRuleset,
unregisterRuleset: core.unregisterRuleset,
/////////////////////
// vvv deprecated vvv
registerRuleset: core.registerRuleset,
registerRulesetURL: core.registerRulesetURL,
flushRuleset: core.flushRuleset,
unregisterRuleset: core.unregisterRuleset,
removeChannel: db.removeChannel,
installRuleset: core.installRuleset,
uninstallRuleset: core.uninstallRuleset,
removePico: db.removePico,
removeChannel: db.removeChannel,
installRuleset: core.installRuleset,
uninstallRuleset: core.uninstallRuleset,
removePico: db.removePico,
putEntVar: db.putEntVar,
getEntVar: db.getEntVar,
delEntVar: db.delEntVar,
putEntVar: db.putEntVar,
getEntVar: db.getEntVar,
delEntVar: db.delEntVar,
dbDump: db.toObj
// ^^^ deprecated ^^^
/// //////////////////
}
if (conf.___core_testing_mode) {
pe.newPico = db.newPico
pe.newPolicy = db.newPolicy
pe.newChannel = db.newChannel
pe.scheduleEventAtYieldable = db.scheduleEventAtYieldable
pe.scheduler = core.scheduler
pe.modules = modules
}
dbDump: db.toObj,
// ^^^ deprecated ^^^
/////////////////////
};
if(conf.___core_testing_mode){
pe.newPico = db.newPico;
pe.newPolicy = db.newPolicy;
pe.newChannel = db.newChannel;
pe.scheduleEventAtYieldable = db.scheduleEventAtYieldable;
pe.scheduler = core.scheduler;
pe.modules = modules;
}
pe.start = function (systemRulesets, callback) {
callback = promiseCallback(callback)
async.series([
db.checkAndRunMigrations,
function (nextStep) {
// compile+store+enable systemRulesets first
async.each(systemRulesets, function (systemRuleset, next) {
var krlSrc = systemRuleset.src
var metaData = systemRuleset.meta
db.storeRuleset(krlSrc, metaData, function (err, data) {
if (err) return next(err)
compileAndLoadRuleset({
rid: data.rid,
src: krlSrc,
hash: data.hash
}, function (err, rs) {
if (err) return next(err)
db.enableRuleset(data.hash, function (err) {
next(err, {rs: rs, hash: data.hash})
})
})
})
}, nextStep)
},
function (next) {
registerAllEnabledRulesets(next)
},
function (next) {
if (_.isEmpty(rootRIDs)) {
return next()
}
db.getRootPico(function (err, rootPico) {
if (err && !err.notFound) {
return next(err)
} else if (!err) {
return next()
}
db.newPico({}, next)
})
},
function (next) {
if (_.isEmpty(rootRIDs)) {
return next()
}
db.getRootPico(function (err, rootPico) {
if (err) return next(err)
pe.start = function(system_rulesets, callback){
async.series([
db.checkAndRunMigrations,
function(nextStep){
// compile+store+enable system_rulesets first
async.each(system_rulesets, function(system_ruleset, next){
var krl_src = system_ruleset.src;
var meta_data = system_ruleset.meta;
db.storeRuleset(krl_src, meta_data, function(err, data){
if(err) return next(err);
compileAndLoadRuleset({
rid: data.rid,
src: krl_src,
hash: data.hash
}, function(err, rs){
if(err) return next(err);
db.enableRuleset(data.hash, function(err){
next(err, {rs: rs, hash: data.hash});
});
});
});
}, nextStep);
},
function(next){
registerAllEnabledRulesets(next);
},
function(next){
if(_.isEmpty(rootRIDs)){
return next();
}
db.getRootPico(function(err, root_pico){
if(err && ! err.notFound){
return next(err);
}else if(!err){
return next();
}
db.newPico({}, next);
});
},
function(next){
if(_.isEmpty(rootRIDs)){
return next();
}
db.getRootPico(function(err, root_pico){
if(err) return next(err);
db.ridsOnPico(rootPico.id, function (err, rids) {
if (err) return next(err)
db.ridsOnPico(root_pico.id, function(err, rids){
if(err) return next(err);
var toInstall = []
_.each(rootRIDs, function (rid) {
if (!_.includes(rids, rid)) {
toInstall.push(rid)
}
})
var to_install = [];
_.each(rootRIDs, function(r_rid){
if( ! _.includes(rids, r_rid)){
to_install.push(r_rid);
}
});
async.eachSeries(toInstall, function (rid, next) {
core.installRuleset(rootPico.id, rid, next)
}, next)
})
})
},
resumeScheduler
], callback)
return callback.promise
}
async.eachSeries(to_install, function(rid, next){
core.installRuleset(root_pico.id, rid, next);
}, next);
});
});
},
resumeScheduler,
], callback);
};
return pe;
};
return pe
}

@@ -1,32 +0,31 @@

var dbRange = require("../dbRange");
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
// /resultset/:rid/vars/:varname -> /appvars/:rid/:varname
up: function (ldb, callback) {
// /resultset/:rid/vars/:varname -> /appvars/:rid/:varname
var to_batch = [];
var dbOps = []
dbRange(ldb, {
prefix: ["resultset"],
}, function(data){
if(data.key[2] !== "vars"){
return;
}
var rid = data.key[1];
var varname = data.key[3];
dbRange(ldb, {
prefix: ['resultset']
}, function (data) {
if (data.key[2] !== 'vars') {
return
}
var rid = data.key[1]
var varname = data.key[3]
to_batch.push({
type: "put",
key: ["appvars", rid, varname],
value: data.value,
});
dbOps.push({
type: 'put',
key: ['appvars', rid, varname],
value: data.value
})
to_batch.push({type: "del", key: data.key});
dbOps.push({type: 'del', key: data.key})
}, function (err) {
if (err) return callback(err)
}, function(err){
if(err) return callback(err);
ldb.batch(to_batch, callback);
});
},
};
ldb.batch(dbOps, callback)
})
}
}

@@ -1,33 +0,32 @@

var dbRange = require("../dbRange");
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
// /pico/:pico_id/:rid/vars/:varname -> /entvars/:pico_id/:rid/:varname
up: function (ldb, callback) {
// /pico/:pico_id/:rid/vars/:varname -> /entvars/:pico_id/:rid/:varname
var to_batch = [];
var dbOps = []
dbRange(ldb, {
prefix: ["pico"],
}, function(data){
if(data.key[3] !== "vars"){
return;
}
var pico_id = data.key[1];
var rid = data.key[2];
var varname = data.key[4];
dbRange(ldb, {
prefix: ['pico']
}, function (data) {
if (data.key[3] !== 'vars') {
return
}
var picoId = data.key[1]
var rid = data.key[2]
var varname = data.key[4]
to_batch.push({
type: "put",
key: ["entvars", pico_id, rid, varname],
value: data.value,
});
dbOps.push({
type: 'put',
key: ['entvars', picoId, rid, varname],
value: data.value
})
to_batch.push({type: "del", key: data.key});
dbOps.push({type: 'del', key: data.key})
}, function (err) {
if (err) return callback(err)
}, function(err){
if(err) return callback(err);
ldb.batch(to_batch, callback);
});
},
};
ldb.batch(dbOps, callback)
})
}
}

@@ -1,31 +0,30 @@

var dbRange = require("../dbRange");
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
// /channel/:eci/pico_id -> /eci-to-pico_id/:eci
up: function (ldb, callback) {
// /channel/:eci/pico_id -> /eci-to-pico_id/:eci
var to_batch = [];
var dbOps = []
dbRange(ldb, {
prefix: ["channel"],
}, function(data){
if(data.key[2] !== "pico_id"){
return;
}
var eci = data.key[1];
dbRange(ldb, {
prefix: ['channel']
}, function (data) {
if (data.key[2] !== 'pico_id') {
return
}
var eci = data.key[1]
to_batch.push({
type: "put",
key: ["eci-to-pico_id", eci],
value: data.value,
});
dbOps.push({
type: 'put',
key: ['eci-to-pico_id', eci],
value: data.value
})
to_batch.push({type: "del", key: data.key});
dbOps.push({type: 'del', key: data.key})
}, function (err) {
if (err) return callback(err)
}, function(err){
if(err) return callback(err);
ldb.batch(to_batch, callback);
});
},
};
ldb.batch(dbOps, callback)
})
}
}

@@ -1,42 +0,41 @@

var _ = require("lodash");
var dbRange = require("../dbRange");
var _ = require('lodash')
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
// /pico/:pico_id/channel/:eci -> /channel/:eci {... pico_id}
// /pico-eci-list/:pico_id/:eci true
up: function (ldb, callback) {
// /pico/:pico_id/channel/:eci -> /channel/:eci {... pico_id}
// /pico-eci-list/:pico_id/:eci true
var to_batch = [];
var dbOps = []
dbRange(ldb, {
prefix: ["pico"],
}, function(data){
if(data.key[2] !== "channel"){
return;
}
var pico_id = data.key[1];
var eci = data.key[3];
dbRange(ldb, {
prefix: ['pico']
}, function (data) {
if (data.key[2] !== 'channel') {
return
}
var picoId = data.key[1]
var eci = data.key[3]
to_batch.push({
type: "put",
key: ["channel", eci],
value: _.assign({}, data.value, {
pico_id: pico_id,
}),
});
to_batch.push({
type: "put",
key: ["pico-eci-list", pico_id, eci],
value: true,
});
dbOps.push({
type: 'put',
key: ['channel', eci],
value: _.assign({}, data.value, {
pico_id: picoId
})
})
dbOps.push({
type: 'put',
key: ['pico-eci-list', picoId, eci],
value: true
})
to_batch.push({type: "del", key: data.key});
to_batch.push({type: "del", key: ["eci-to-pico_id", eci]});
dbOps.push({type: 'del', key: data.key})
dbOps.push({type: 'del', key: ['eci-to-pico_id', eci]})
}, function (err) {
if (err) return callback(err)
}, function(err){
if(err) return callback(err);
ldb.batch(to_batch, callback);
});
},
};
ldb.batch(dbOps, callback)
})
}
}

@@ -1,39 +0,38 @@

var dbRange = require("../dbRange");
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
// /pico/:pico_id/ruleset/:rid
// -> /pico-ruleset/:pico_id/:rid
// -> /ruleset-pico/:rid/:pico_id
up: function (ldb, callback) {
// /pico/:pico_id/ruleset/:rid
// -> /pico-ruleset/:pico_id/:rid
// -> /ruleset-pico/:rid/:pico_id
var to_batch = [];
var dbOps = []
dbRange(ldb, {
prefix: ["pico"],
}, function(data){
if(data.key[2] !== "ruleset"){
return;
}
var pico_id = data.key[1];
var rid = data.key[3];
dbRange(ldb, {
prefix: ['pico']
}, function (data) {
if (data.key[2] !== 'ruleset') {
return
}
var picoId = data.key[1]
var rid = data.key[3]
to_batch.push({
type: "put",
key: ["pico-ruleset", pico_id, rid],
value: data.value,
});
to_batch.push({
type: "put",
key: ["ruleset-pico", pico_id, rid],
value: data.value,
});
dbOps.push({
type: 'put',
key: ['pico-ruleset', picoId, rid],
value: data.value
})
dbOps.push({
type: 'put',
key: ['ruleset-pico', picoId, rid],
value: data.value
})
to_batch.push({type: "del", key: data.key});
dbOps.push({type: 'del', key: data.key})
}, function (err) {
if (err) return callback(err)
}, function(err){
if(err) return callback(err);
ldb.batch(to_batch, callback);
});
},
};
ldb.batch(dbOps, callback)
})
}
}

@@ -1,61 +0,57 @@

var dbRange = require("../dbRange");
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
// /entvars/:pico_id/io.picolabs.pico/parent
// /entvars/:pico_id/io.picolabs.pico/children
up: function (ldb, callback) {
// /entvars/:pico_id/io.picolabs.pico/parent
// /entvars/:pico_id/io.picolabs.pico/children
var to_batch = [];
var dbOps = []
dbRange(ldb, {
prefix: ["entvars"],
}, function(data){
dbRange(ldb, {
prefix: ['entvars']
}, function (data) {
var picoId = data.key[1]
var pico_id = data.key[1];
if (data.key[2] !== 'io.picolabs.pico') {
return
}
if (data.key[3] === 'parent') {
var parentId = data.value.id
if(data.key[2] !== "io.picolabs.pico"){
return;
}
if(data.key[3] === "parent"){
dbOps.push({
type: 'put',
key: ['pico', picoId],
value: {
id: picoId,
parent_id: parentId
}
})
dbOps.push({
type: 'put',
key: ['pico-children', parentId, picoId],
value: true
})
}
}, function (err) {
if (err) return callback(err)
var parent_id = data.value.id;
dbRange(ldb, {
prefix: ['channel'],
limit: 1// the old schema relied on the first eci to be root
}, function (data) {
dbOps.push({
type: 'put',
key: ['root_pico'],
value: {
id: data.value.pico_id,
eci: data.value.id
}
})
}, function (err) {
if (err) return callback(err)
to_batch.push({
type: "put",
key: ["pico", pico_id],
value: {
id: pico_id,
parent_id: parent_id,
},
});
to_batch.push({
type: "put",
key: ["pico-children", parent_id, pico_id],
value: true,
});
}
}, function(err){
if(err) return callback(err);
dbRange(ldb, {
prefix: ["channel"],
limit: 1,//the old schema relied on the first eci to be root
}, function(data){
to_batch.push({
type: "put",
key: ["root_pico"],
value: {
id: data.value.pico_id,
eci: data.value.id,
}
});
}, function(err){
if(err) return callback(err);
ldb.batch(to_batch, callback);
});
});
},
};
ldb.batch(dbOps, callback)
})
})
}
}

@@ -1,83 +0,82 @@

var _ = require("lodash");
var dbRange = require("../dbRange");
var sovrinDID = require("sovrin-did");
var _ = require('lodash')
var dbRange = require('../dbRange')
var sovrinDID = require('sovrin-did')
var newChannel_base = function(opts){
var did = sovrinDID.gen();
var channel = {
id: did.did,
pico_id: opts.pico_id,
name: opts.name,
type: opts.type,
sovrin: did,
};
var db_ops = [
{
type: "put",
key: ["channel", channel.id],
value: channel,
},
{
type: "put",
key: ["pico-eci-list", channel.pico_id, channel.id],
value: true,
}
];
return {
channel: channel,
db_ops: db_ops,
};
};
function newChannelBase (opts) {
var did = sovrinDID.gen()
var channel = {
id: did.did,
pico_id: opts.pico_id,
name: opts.name,
type: opts.type,
sovrin: did
}
var dbOps = [
{
type: 'put',
key: ['channel', channel.id],
value: channel
},
{
type: 'put',
key: ['pico-eci-list', channel.pico_id, channel.id],
value: true
}
]
return {
channel: channel,
dbOps: dbOps
}
}
module.exports = {
up: function(ldb, callback){
var db_ops = [];
up: function (ldb, callback) {
var dbOps = []
ldb.get(["root_pico"], function(err, root_pico){
if(err){
if(err.notFound){
root_pico = {};
}else{
return callback(err);
}
}
ldb.get(['root_pico'], function (err, rootPico) {
if (err) {
if (err.notFound) {
rootPico = {}
} else {
return callback(err)
}
}
dbRange(ldb, {
prefix: ["pico"],
}, function(data){
var pico_id = data.key[1];
dbRange(ldb, {
prefix: ['pico']
}, function (data) {
var picoId = data.key[1]
var c = newChannel_base({
pico_id: pico_id,
name: "admin",
type: "secret",
});
var c = newChannelBase({
pico_id: picoId,
name: 'admin',
type: 'secret'
})
db_ops = db_ops.concat(c.db_ops);
dbOps = dbOps.concat(c.dbOps)
var pico = _.assign({}, data.value, {
admin_eci: c.channel.id,
});
var pico = _.assign({}, data.value, {
admin_eci: c.channel.id
})
db_ops.push({
type: "put",
key: ["pico", pico_id],
value: pico,
});
dbOps.push({
type: 'put',
key: ['pico', picoId],
value: pico
})
if(root_pico.id === pico_id){
db_ops.push({
type: "put",
key: ["root_pico"],
value: pico,
});
}
if (rootPico.id === picoId) {
dbOps.push({
type: 'put',
key: ['root_pico'],
value: pico
})
}
}, function (err) {
if (err) return callback(err)
}, function(err){
if(err) return callback(err);
ldb.batch(db_ops, callback);
});
});
},
};
ldb.batch(dbOps, callback)
})
})
}
}

@@ -1,60 +0,60 @@

var _ = require("lodash");
var ktypes = require("krl-stdlib/types");
var dbRange = require("../dbRange");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
var db_ops = [];
up: function (ldb, callback) {
var dbOps = []
var onKV = function(data){
var key_prefix = data.key;
var val = data.value;
var onKV = function (data) {
var keyPrefix = data.key
var val = data.value
//NOTE: not sharing code with DB.js b/c migrations should be immutable
//i.e. produce the same result regardless of previous codebase states
var index_type = ktypes.typeOf(val);
var root_value = {type: index_type};
switch(index_type){
case "Null":
root_value.value = null;
break;
case "Function":
case "Action":
root_value.type = "String";
root_value.value = ktypes.toString(val);
break;
case "Map":
case "Array":
_.each(val, function(v, k){
db_ops.push({
type: "put",
key: key_prefix.concat(["value", k]),
value: v,
});
});
break;
default:
root_value.value = val;
}
db_ops.push({
type: "put",
key: key_prefix,
value: root_value,
});
};
// NOTE: not sharing code with DB.js b/c migrations should be immutable
// i.e. produce the same result regardless of previous codebase states
var indexType = ktypes.typeOf(val)
var rootValue = {type: indexType}
switch (indexType) {
case 'Null':
rootValue.value = null
break
case 'Function':
case 'Action':
rootValue.type = 'String'
rootValue.value = ktypes.toString(val)
break
case 'Map':
case 'Array':
_.each(val, function (v, k) {
dbOps.push({
type: 'put',
key: keyPrefix.concat(['value', k]),
value: v
})
})
break
default:
rootValue.value = val
}
dbOps.push({
type: 'put',
key: keyPrefix,
value: rootValue
})
}
dbRange(ldb, {
prefix: ["entvars"],
}, onKV, function(err){
if(err) return callback(err);
dbRange(ldb, {
prefix: ['entvars']
}, onKV, function (err) {
if (err) return callback(err)
dbRange(ldb, {
prefix: ["appvars"],
}, onKV, function(err){
if(err) return callback(err);
dbRange(ldb, {
prefix: ['appvars']
}, onKV, function (err) {
if (err) return callback(err)
ldb.batch(db_ops, callback);
});
});
},
};
ldb.batch(dbOps, callback)
})
})
}
}

@@ -1,86 +0,86 @@

var _ = require("lodash");
var cuid = require("cuid");
var test = require("tape");
var encode = require("encoding-down");
var dbRange = require("../dbRange");
var levelup = require("levelup");
var memdown = require("memdown");
var bytewise = require("bytewise");
var safeJsonCodec = require("level-json-coerce-null");
var migration = require("./20171031T182007_pvar_index");
var _ = require('lodash')
var cuid = require('cuid')
var test = require('tape')
var encode = require('encoding-down')
var dbRange = require('../dbRange')
var levelup = require('levelup')
var memdown = require('memdown')
var bytewise = require('bytewise')
var safeJsonCodec = require('level-json-coerce-null')
var migration = require('./20171031T182007_pvar_index')
test("migration - pvar_index", function(t){
var ldb = levelup(encode(memdown(cuid()), {
keyEncoding: bytewise,
valueEncoding: safeJsonCodec,
}));
var db_ops = [];
var put = function(varname, value){
db_ops.push({
type: "put",
key: ["entvars", "p0", "r0", varname],
value: value,
});
db_ops.push({
type: "put",
key: ["appvars", "r0", varname],
value: value,
});
};
test('migration - pvar_index', function (t) {
var ldb = levelup(encode(memdown(cuid()), {
keyEncoding: bytewise,
valueEncoding: safeJsonCodec
}))
var dbOps = []
var put = function (varname, value) {
dbOps.push({
type: 'put',
key: ['entvars', 'p0', 'r0', varname],
value: value
})
dbOps.push({
type: 'put',
key: ['appvars', 'r0', varname],
value: value
})
}
put("v0", {foo: "bar", baz: 1});
put("v1", [1, 2, 3, "ok"]);
put("v2", "hi");
put("v3", true);
put("v4", void 0);
put("v5", 123.45);
put('v0', {foo: 'bar', baz: 1})
put('v1', [1, 2, 3, 'ok'])
put('v2', 'hi')
put('v3', true)
put('v4', void 0)
put('v5', 123.45)
ldb.batch(db_ops, function(err){
if(err) return t.end(err);
migration.up(ldb, function(err){
if(err) return t.end(err);
var entvars = {};
var appvars = {};
dbRange(ldb, {
prefix: [],
}, function(data){
if(data.key[0] === "entvars"){
_.set(entvars, data.key.slice(3), data.value);
}else if(data.key[0] === "appvars"){
_.set(appvars, data.key.slice(2), data.value);
}
}, function(err){
if(err) return t.end(err);
ldb.batch(dbOps, function (err) {
if (err) return t.end(err)
migration.up(ldb, function (err) {
if (err) return t.end(err)
var entvars = {}
var appvars = {}
dbRange(ldb, {
prefix: []
}, function (data) {
if (data.key[0] === 'entvars') {
_.set(entvars, data.key.slice(3), data.value)
} else if (data.key[0] === 'appvars') {
_.set(appvars, data.key.slice(2), data.value)
}
}, function (err) {
if (err) return t.end(err)
t.deepEquals(entvars, appvars, "ent and app should be the same for these tests");
t.deepEquals(entvars, appvars, 'ent and app should be the same for these tests')
t.deepEquals(entvars.v0, {
type: "Map",
value: {foo: "bar", baz: 1},
});
t.deepEquals(entvars.v1, {
type: "Array",
value: [1, 2, 3, "ok"],
});
t.deepEquals(entvars.v2, {
type: "String",
value: "hi",
});
t.deepEquals(entvars.v3, {
type: "Boolean",
value: true,
});
t.deepEquals(entvars.v4, {
type: "Null",
value: null,
});
t.deepEquals(entvars.v5, {
type: "Number",
value: 123.45,
});
t.deepEquals(entvars.v0, {
type: 'Map',
value: {foo: 'bar', baz: 1}
})
t.deepEquals(entvars.v1, {
type: 'Array',
value: [1, 2, 3, 'ok']
})
t.deepEquals(entvars.v2, {
type: 'String',
value: 'hi'
})
t.deepEquals(entvars.v3, {
type: 'Boolean',
value: true
})
t.deepEquals(entvars.v4, {
type: 'Null',
value: null
})
t.deepEquals(entvars.v5, {
type: 'Number',
value: 123.45
})
t.end();
});
});
});
});
t.end()
})
})
})
})

@@ -1,44 +0,44 @@

var _ = require("lodash");
var dbRange = require("../dbRange");
var _ = require('lodash')
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
var db_ops = [];
up: function (ldb, callback) {
var dbOps = []
//NOTE: not sharing with DB.js b/c migrations should be immutable
// i.e. produce the same result regardless of previous codebase states
var ADMIN_POLICY_ID = "allow-all-policy";//NOTE: we default to allow-all for now
// NOTE: not sharing with DB.js b/c migrations should be immutable
// i.e. produce the same result regardless of previous codebase states
var ADMIN_POLICY_ID = 'allow-all-policy'// NOTE: we default to allow-all for now
//the admin policy is hard wired in, so we should create it once before
//the engine starts up (i.e. as this migration)
db_ops.push({
type: "put",
key: ["policy", ADMIN_POLICY_ID],
value: {
id: ADMIN_POLICY_ID,
name: "admin channel policy",
event: {allow: [{}]},
query: {allow: [{}]},
},
});
// the admin policy is hard wired in, so we should create it once before
// the engine starts up (i.e. as this migration)
dbOps.push({
type: 'put',
key: ['policy', ADMIN_POLICY_ID],
value: {
id: ADMIN_POLICY_ID,
name: 'admin channel policy',
event: {allow: [{}]},
query: {allow: [{}]}
}
})
// old engines don't have policy_ids, so just set them all to it
dbRange(ldb, {
prefix: ["channel"],
}, function(data){
if(_.has(data.value, "policy_id")){
return;
}
db_ops.push({
type: "put",
key: data.key,
value: _.assign({}, data.value, {
policy_id: ADMIN_POLICY_ID,
}),
});
}, function(err){
if(err) return callback(err);
ldb.batch(db_ops, callback);
});
},
};
// old engines don't have policy_ids, so just set them all to it
dbRange(ldb, {
prefix: ['channel']
}, function (data) {
if (_.has(data.value, 'policy_id')) {
return
}
dbOps.push({
type: 'put',
key: data.key,
value: _.assign({}, data.value, {
policy_id: ADMIN_POLICY_ID
})
})
}, function (err) {
if (err) return callback(err)
ldb.batch(dbOps, callback)
})
}
}

@@ -1,51 +0,49 @@

var _ = require("lodash");
var dbRange = require("../dbRange");
var _ = require('lodash')
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
up: function (ldb, callback) {
var dbOps = []
var db_ops = [];
var newData = {}
var new_data = {};
dbRange(ldb, {
prefix: ['state_machine']
}, function (data) {
var picoId = data.key[1]
var rid = data.key[2]
var ruleName = data.key[3]
dbRange(ldb, {
prefix: ["state_machine"],
}, function(data){
var pico_id = data.key[1];
var rid = data.key[2];
var rule_name = data.key[3];
_.set(newData, [picoId, rid, ruleName, 'state'], data.value)
}, function (err) {
if (err) return callback(err)
_.set(new_data, [pico_id, rid, rule_name, "state"], data.value);
}, function(err){
if(err) return callback(err);
dbRange(ldb, {
prefix: ['state_machine_starttime']
}, function (data) {
var picoId = data.key[1]
var rid = data.key[2]
var ruleName = data.key[3]
dbRange(ldb, {
prefix: ["state_machine_starttime"],
}, function(data){
var pico_id = data.key[1];
var rid = data.key[2];
var rule_name = data.key[3];
_.set(newData, [picoId, rid, ruleName, 'starttime'], data.value)
_.set(new_data, [pico_id, rid, rule_name, "starttime"], data.value);
dbOps.push({type: 'del', key: data.key})
}, function (err) {
if (err) return callback(err)
db_ops.push({type: "del", key: data.key});
}, function(err){
if(err) return callback(err);
_.each(new_data, function(data, pico_id){
_.each(data, function(data, rid){
_.each(data, function(value, rule_name){
db_ops.push({
type: "put",
key: ["state_machine", pico_id, rid, rule_name],
value: value,
});
});
});
});
ldb.batch(db_ops, callback);
});
});
},
};
_.each(newData, function (data, picoId) {
_.each(data, function (data, rid) {
_.each(data, function (value, ruleName) {
dbOps.push({
type: 'put',
key: ['state_machine', picoId, rid, ruleName],
value: value
})
})
})
})
ldb.batch(dbOps, callback)
})
})
}
}

@@ -1,38 +0,37 @@

var _ = require("lodash");
var async = require("async");
var dbRange = require("../dbRange");
var _ = require('lodash')
var async = require('async')
var dbRange = require('../dbRange')
module.exports = {
up: function(ldb, callback){
up: function (ldb, callback) {
var dbOps = []
var db_ops = [];
async.eachSeries([
// For each of these keypath prefixes, rename the rid
"pico-ruleset",
"ruleset-pico",
"entvars",
"appvars",
"state_machine",
"aggregator_var",
], function(prefix, next){
dbRange(ldb, {
prefix: [prefix],
}, function(data){
var new_key = _.map(data.key, function(p){
return p === "io.picolabs.pico"
? "io.picolabs.wrangler"
: p;
});
if(_.isEqual(data.key, new_key)){
return;
}
db_ops.push({type: "put", key: new_key, value: data.value});
db_ops.push({type: "del", key: data.key});
}, next);
}, function(err){
if(err) return callback(err);
ldb.batch(db_ops, callback);
});
},
};
async.eachSeries([
// For each of these keypath prefixes, rename the rid
'pico-ruleset',
'ruleset-pico',
'entvars',
'appvars',
'state_machine',
'aggregator_var'
], function (prefix, next) {
dbRange(ldb, {
prefix: [prefix]
}, function (data) {
var newKey = _.map(data.key, function (p) {
return p === 'io.picolabs.pico'
? 'io.picolabs.wrangler'
: p
})
if (_.isEqual(data.key, newKey)) {
return
}
dbOps.push({type: 'put', key: newKey, value: data.value})
dbOps.push({type: 'del', key: data.key})
}, next)
}, function (err) {
if (err) return callback(err)
ldb.batch(dbOps, callback)
})
}
}

@@ -17,13 +17,13 @@ /**

module.exports = {
"20170727T211511_appvars": require("./20170727T211511_appvars"),
"20170727T223943_entvars": require("./20170727T223943_entvars"),
"20170803T211131_eci-to-pico_id": require("./20170803T211131_eci-to-pico_id"),
"20170803T214146_channel": require("./20170803T214146_channel"),
"20170804T214426_pico-ruleset": require("./20170804T214426_pico-ruleset"),
"20170810T170618_parent-child": require("./20170810T170618_parent-child"),
"20170823T213214_admin_eci": require("./20170823T213214_admin_eci"),
"20171031T182007_pvar_index": require("./20171031T182007_pvar_index"),
"20171117T191959_admin_policy_id": require("./20171117T191959_admin_policy_id"),
"20180222T195856_state_machine": require("./20180222T195856_state_machine"),
"20180223T162324_wranger_rid": require("./20180223T162324_wranger_rid"),
};
'20170727T211511_appvars': require('./20170727T211511_appvars'),
'20170727T223943_entvars': require('./20170727T223943_entvars'),
'20170803T211131_eci-to-pico_id': require('./20170803T211131_eci-to-pico_id'),
'20170803T214146_channel': require('./20170803T214146_channel'),
'20170804T214426_pico-ruleset': require('./20170804T214426_pico-ruleset'),
'20170810T170618_parent-child': require('./20170810T170618_parent-child'),
'20170823T213214_admin_eci': require('./20170823T213214_admin_eci'),
'20171031T182007_pvar_index': require('./20171031T182007_pvar_index'),
'20171117T191959_admin_policy_id': require('./20171117T191959_admin_policy_id'),
'20180222T195856_state_machine': require('./20180222T195856_state_machine'),
'20180223T162324_wranger_rid': require('./20180223T162324_wranger_rid')
}

@@ -1,19 +0,18 @@

var mkKRLfn = require("./mkKRLfn");
var mkKRLfn = require('./mkKRLfn')
module.exports = function(arg_order, fn){
module.exports = function (paramOrder, fn) {
var kfn = mkKRLfn(paramOrder, fn)
var kfn = mkKRLfn(arg_order, fn);
var actionFn = function (ctx, args) {
return kfn(ctx, args).then(function (data) {
return [// actions have multiple returns
// modules return only one value
data
]
})
}
var actionFn = function(ctx, args){
return kfn(ctx, args).then(function(data){
return [// actions have multiple returns
//modules return only one value
data
];
});
};
actionFn.is_an_action = true
actionFn.is_an_action = true;
return actionFn;
};
return actionFn
}

@@ -1,11 +0,11 @@

var _ = require("lodash");
var cocb = require("co-callback");
var normalizeKRLArgs = require("./normalizeKRLArgs");
var _ = require('lodash')
var util = require('util')
var normalizeKRLArgs = require('./normalizeKRLArgs')
module.exports = function(param_order, fn){
var fixArgs = _.partial(normalizeKRLArgs, param_order);
var wfn = cocb.wrap(fn);
return function(ctx, args){
return wfn(ctx, fixArgs(args));
};
};
module.exports = function (paramOrder, fn) {
var fixArgs = _.partial(normalizeKRLArgs, paramOrder)
var wfn = util.promisify(fn)
return function (ctx, args) {
return wfn(ctx, fixArgs(args))
}
}

@@ -1,49 +0,49 @@

var _ = require("lodash");
var fs = require("fs");
var cuid = require("cuid");
var path = require("path");
var memdown = require("memdown");
var PicoEngine = require("./");
var _ = require('lodash')
var fs = require('fs')
var cuid = require('cuid')
var path = require('path')
var memdown = require('memdown')
var PicoEngine = require('./')
var test_rulesets = {};
var test_dir = path.resolve(__dirname, "../../../test-rulesets");
_.each(fs.readdirSync(test_dir), function(file){
if(!/\.js$/.test(file)){
return;
}
var rs = require(path.resolve(test_dir, file));
if(!rs.rid){
return;
}
test_rulesets[rs.rid] = rs;
test_rulesets[rs.rid].url = "http://fake-url/test-rulesets/" + file.replace(/\.js$/, ".krl");
});
var testRulesets = {}
var testDir = path.resolve(__dirname, '../../../test-rulesets')
_.each(fs.readdirSync(testDir), function (file) {
if (!/\.js$/.test(file)) {
return
}
var rs = require(path.resolve(testDir, file))
if (!rs.rid) {
return
}
testRulesets[rs.rid] = rs
testRulesets[rs.rid].url = 'http://fake-url/test-rulesets/' + file.replace(/\.js$/, '.krl')
})
var system_rulesets = _.map(_.keys(test_rulesets), function(rid){
return {
src: "ruleset " + rid + "{}",
meta: {url: test_rulesets[rid].url},
};
});
var systemRulesets = _.map(_.keys(testRulesets), function (rid) {
return {
src: 'ruleset ' + rid + '{}',
meta: {url: testRulesets[rid].url}
}
})
module.exports = function(opts, callback){
opts = opts || {};
var pe = PicoEngine({
host: "https://test-host",
___core_testing_mode: true,
compileAndLoadRuleset: opts.compileAndLoadRuleset || function(rs_info, callback){
var rid = rs_info.src.substring(8, rs_info.src.length - 2);
var rs = test_rulesets[rid];
callback(undefined, rs);
},
rootRIDs: opts.rootRIDs,
db: {
db: opts.ldb || memdown(cuid()),
__use_sequential_ids_for_testing: !opts.__dont_use_sequential_ids_for_testing,
},
modules: opts.modules,
});
pe.start(system_rulesets, function(err){
callback(err, pe);
});
};
module.exports = function (opts, callback) {
opts = opts || {}
var pe = PicoEngine({
host: 'https://test-host',
___core_testing_mode: true,
compileAndLoadRuleset: opts.compileAndLoadRuleset || function (rsInfo, callback) {
var rid = rsInfo.src.substring(8, rsInfo.src.length - 2)
var rs = testRulesets[rid]
callback(null, rs)
},
rootRIDs: opts.rootRIDs,
db: {
db: opts.ldb || memdown(cuid()),
__use_sequential_ids_for_testing: !opts.__dont_use_sequential_ids_for_testing
},
modules: opts.modules
})
pe.start(systemRulesets, function (err) {
callback(err, pe)
})
}

@@ -1,13 +0,13 @@

module.exports = function(core){
return {
get: function(ctx, id, callback){
core.db.getAppVar(ctx.rid, id.var_name, id.query, callback);
},
set: function(ctx, id, value, callback){
core.db.putAppVar(ctx.rid, id.var_name, id.query, value, callback);
},
del: function(ctx, id, callback){
core.db.delAppVar(ctx.rid, id.var_name, id.query, callback);
},
};
};
module.exports = function (core) {
return {
get: function (ctx, id, callback) {
core.db.getAppVar(ctx.rid, id.var_name, id.query, callback)
},
set: function (ctx, id, value, callback) {
core.db.putAppVar(ctx.rid, id.var_name, id.query, value, callback)
},
del: function (ctx, id, callback) {
core.db.delAppVar(ctx.rid, id.var_name, id.query, callback)
}
}
}

@@ -1,528 +0,495 @@

var _ = require("lodash");
var bs58 = require("bs58");
var async = require("async");
var urllib = require("url");
var ktypes = require("krl-stdlib/types");
var mkKRLfn = require("../mkKRLfn");
var sovrinDID = require("sovrin-did");
var mkKRLaction = require("../mkKRLaction");
var ADMIN_POLICY_ID = require("../DB").ADMIN_POLICY_ID;
var _ = require('lodash')
var bs58 = require('bs58')
var async = require('async')
var urllib = require('url')
var ktypes = require('krl-stdlib/types')
var mkKRLfn = require('../mkKRLfn')
var sovrinDID = require('sovrin-did')
var mkKRLaction = require('../mkKRLaction')
var ADMIN_POLICY_ID = require('../DB').ADMIN_POLICY_ID
var assertArg = function(fn_name, args, key, type){
if( ! _.has(args, key)){
throw new Error("engine:" + fn_name + " argument `" + key + "` " + type + " is required");
}
if(ktypes.typeOf(args[key]) !== type){
throw new TypeError("engine:" + fn_name + " argument `" + key + "` should be " + type + " but was " + ktypes.typeOf(args[key]));
}
return args[key];
};
var assertArg = function (fnName, args, key, type) {
if (!_.has(args, key)) {
throw new Error('engine:' + fnName + ' argument `' + key + '` ' + type + ' is required')
}
if (ktypes.typeOf(args[key]) !== type) {
throw new TypeError('engine:' + fnName + ' argument `' + key + '` should be ' + type + ' but was ' + ktypes.typeOf(args[key]))
}
return args[key]
}
var picoArgOrCtxPico = function(fn_name, ctx, args, key){
key = key || "pico_id";
var pico_id = _.has(args, key) ? args[key] : ctx.pico_id;
if(!ktypes.isString(pico_id)){
throw new TypeError("engine:" + fn_name + " was given " + ktypes.toString(args.eci) + " instead of a " + key + " string");
}
return pico_id;
};
var picoArgOrCtxPico = function (fnName, ctx, args, key) {
key = key || 'pico_id'
var picoId = _.has(args, key) ? args[key] : ctx.pico_id
if (!ktypes.isString(picoId)) {
throw new TypeError('engine:' + fnName + ' was given ' + ktypes.toString(args.eci) + ' instead of a ' + key + ' string')
}
return picoId
}
module.exports = function(core){
module.exports = function (core) {
var fns = {
var fns = {
getPicoIDByECI: mkKRLfn([
'eci'
], function (ctx, args, callback) {
if (!_.has(args, 'eci')) {
return callback(new Error('engine:getPicoIDByECI needs an eci string'))
}
if (!ktypes.isString(args.eci)) {
return callback(new TypeError('engine:getPicoIDByECI was given ' + ktypes.toString(args.eci) + ' instead of an eci string'))
}
getPicoIDByECI: mkKRLfn([
"eci",
], function(ctx, args, callback){
core.db.getPicoIDByECI(args.eci, function (err, pico) {
if (err && err.notFound) return callback()
if (err) return callback(err)
callback(null, pico)
})
}),
if(!_.has(args, "eci")){
return callback(new Error("engine:getPicoIDByECI needs an eci string"));
}
if(!ktypes.isString(args.eci)){
return callback(new TypeError("engine:getPicoIDByECI was given " + ktypes.toString(args.eci) + " instead of an eci string"));
}
getParent: mkKRLfn([
'pico_id'
], function (ctx, args, callback) {
var picoId = picoArgOrCtxPico('getParent', ctx, args)
core.db.getPicoIDByECI(args.eci, function(err, pico){
if(err && err.notFound) return callback();
if(err) return callback(err);
callback(null, pico);
});
}),
core.db.assertPicoID(picoId, function (err, picoId) {
if (err && err.notFound) return callback()
if (err) return callback(err)
core.db.getParent(picoId, function (err, parentId) {
if (err && err.notFound) return callback()
if (err) return callback(err)
callback(null, parentId)
})
})
}),
getParent: mkKRLfn([
"pico_id",
], function(ctx, args, callback){
getAdminECI: mkKRLfn([
'pico_id'
], function (ctx, args, callback) {
var picoId = picoArgOrCtxPico('getAdminECI', ctx, args)
var pico_id = picoArgOrCtxPico("getParent", ctx, args);
core.db.assertPicoID(picoId, function (err, picoId) {
if (err && err.notFound) return callback()
if (err) return callback(err)
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err && err.notFound) return callback();
if(err) return callback(err);
core.db.getAdminECI(picoId, function (err, eci) {
if (err && err.notFound) return callback()
if (err) return callback(err)
callback(null, eci)
})
})
}),
core.db.getParent(pico_id, function(err, parent_id){
if(err && err.notFound) return callback();
if(err) return callback(err);
callback(null, parent_id);
});
});
}),
listChildren: mkKRLfn([
'pico_id'
], function (ctx, args, callback) {
var picoId = picoArgOrCtxPico('listChildren', ctx, args)
core.db.assertPicoID(picoId, function (err, picoId) {
if (err && err.notFound) return callback()
if (err) return callback(err)
getAdminECI: mkKRLfn([
"pico_id",
], function(ctx, args, callback){
core.db.listChildren(picoId, function (err, children) {
if (err && err.notFound) return callback()
if (err) return callback(err)
callback(null, children)
})
})
}),
var pico_id = picoArgOrCtxPico("getAdminECI", ctx, args);
listPolicies: mkKRLfn([
], function (ctx, args, callback) {
core.db.listPolicies(callback)
}),
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err && err.notFound) return callback();
if(err) return callback(err);
listChannels: mkKRLfn([
'pico_id'
], function (ctx, args, callback) {
var picoId = picoArgOrCtxPico('listChannels', ctx, args)
core.db.getAdminECI(pico_id, function(err, eci){
if(err && err.notFound) return callback();
if(err) return callback(err);
callback(null, eci);
});
});
}),
core.db.assertPicoID(picoId, function (err, picoId) {
if (err && err.notFound) return callback()
if (err) return callback(err)
core.db.listChannels(picoId, callback)
})
}),
listChildren: mkKRLfn([
"pico_id",
], function(ctx, args, callback){
listInstalledRIDs: mkKRLfn([
'pico_id'
], function (ctx, args, callback) {
var picoId = picoArgOrCtxPico('listInstalledRIDs', ctx, args)
var pico_id = picoArgOrCtxPico("listChildren", ctx, args);
core.db.assertPicoID(picoId, function (err, picoId) {
if (err && err.notFound) return callback()
if (err) return callback(err)
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err && err.notFound) return callback();
if(err) return callback(err);
core.db.ridsOnPico(picoId, function (err, ridSet) {
if (err && err.notFound) return callback()
if (err) return callback(err)
callback(null, _.keys(ridSet))
})
})
}),
core.db.listChildren(pico_id, function(err, children){
if(err && err.notFound) return callback();
if(err) return callback(err);
callback(null, children);
});
});
}),
listAllEnabledRIDs: mkKRLfn([
], function (ctx, args, callback) {
core.db.listAllEnabledRIDs(callback)
}),
describeRuleset: mkKRLfn([
'rid'
], function (ctx, args, callback) {
if (!_.has(args, 'rid')) {
return callback(new Error('engine:describeRuleset needs a rid string'))
}
if (!ktypes.isString(args.rid)) {
return callback(new TypeError('engine:describeRuleset was given ' + ktypes.toString(args.rid) + ' instead of a rid string'))
}
listPolicies: mkKRLfn([
], function(ctx, args, callback){
core.db.listPolicies(callback);
}),
core.db.getEnabledRuleset(args.rid, function (err, data) {
if (err && err.notFound) return callback()
if (err) return callback(err)
var rid = data.rid
callback(null, {
rid: rid,
src: data.src,
hash: data.hash,
url: data.url,
timestamp_stored: data.timestamp_stored,
timestamp_enable: data.timestamp_enable,
meta: {
name: _.get(core.rsreg.get(rid), ['meta', 'name']),
description: _.get(core.rsreg.get(rid), ['meta', 'description']),
author: _.get(core.rsreg.get(rid), ['meta', 'author'])
}
})
})
}),
newPico: mkKRLaction([
'parent_id'
], function (ctx, args, callback) {
var parentId = picoArgOrCtxPico('newPico', ctx, args, 'parent_id')
listChannels: mkKRLfn([
"pico_id",
], function(ctx, args, callback){
core.db.assertPicoID(parentId, function (err, parentId) {
if (err) return callback(err)
var pico_id = picoArgOrCtxPico("listChannels", ctx, args);
core.db.newPico({
parent_id: parentId
}, callback)
})
}),
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err && err.notFound) return callback();
if(err) return callback(err);
removePico: mkKRLaction([
'pico_id'
], function (ctx, args, callback) {
var picoId = picoArgOrCtxPico('removePico', ctx, args)
core.db.listChannels(pico_id, callback);
});
}),
core.db.assertPicoID(picoId, function (err, picoId) {
if (err && err.notFound) return callback(null, false)
if (err) return callback(err)
core.db.listChildren(picoId, function (err, children) {
if (err) return callback(err)
if (_.size(children) > 0) {
callback(new Error('Cannot remove pico "' + picoId + '" because it has ' + _.size(children) + ' children'))
return
}
core.db.removePico(picoId, function () {
if (err && err.notFound) return callback(null, false)
if (err) return callback(err)
callback(null, true)
})
})
})
}),
listInstalledRIDs: mkKRLfn([
"pico_id",
], function(ctx, args, callback){
newPolicy: mkKRLaction([
'policy'
], function (ctx, args, callback) {
core.db.newPolicy(args.policy, callback)
}),
var pico_id = picoArgOrCtxPico("listInstalledRIDs", ctx, args);
removePolicy: mkKRLaction([
'policy_id'
], function (ctx, args, callback) {
var id = args.policy_id
if (!_.isString(id)) {
return callback(new TypeError('engine:removePolicy was given ' + ktypes.toString(id) + ' instead of a policy_id string'))
}
core.db.removePolicy(id, function (err) {
if (err && err.notFound) return callback(null, false)
if (err) return callback(err)
callback(null, true)
})
}),
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err && err.notFound) return callback();
if(err) return callback(err);
newChannel: mkKRLaction([
'pico_id',
'name',
'type',
'policy_id'
], function (ctx, args, callback) {
var picoId = picoArgOrCtxPico('newChannel', ctx, args)
var policyId = ADMIN_POLICY_ID
core.db.ridsOnPico(pico_id, function(err, rid_set){
if(err && err.notFound) return callback();
if(err) return callback(err);
callback(null, _.keys(rid_set));
});
});
}),
if (_.has(args, 'policy_id')) {
if (!ktypes.isString(args.policy_id)) {
throw new TypeError('engine:newChannel argument `policy_id` should be String but was ' + ktypes.typeOf(args.policy_id))
}
policyId = args.policy_id
}
if (!_.has(args, 'name')) {
return callback(new Error('engine:newChannel needs a name string'))
}
if (!_.has(args, 'type')) {
return callback(new Error('engine:newChannel needs a type string'))
}
listAllEnabledRIDs: mkKRLfn([
], function(ctx, args, callback){
core.db.listAllEnabledRIDs(callback);
}),
core.db.assertPicoID(picoId, function (err, picoId) {
if (err) return callback(err)
core.db.assertPolicyID(policyId, function (err, policyId) {
if (err) return callback(err)
describeRuleset: mkKRLfn([
"rid",
], function(ctx, args, callback){
core.db.newChannel({
pico_id: picoId,
name: ktypes.toString(args.name),
type: ktypes.toString(args.type),
policy_id: policyId
}, callback)
})
})
}),
if(!_.has(args, "rid")){
return callback(new Error("engine:describeRuleset needs a rid string"));
}
if(!ktypes.isString(args.rid)){
return callback(new TypeError("engine:describeRuleset was given " + ktypes.toString(args.rid) + " instead of a rid string"));
}
removeChannel: mkKRLaction([
'eci'
], function (ctx, args, callback) {
if (!_.has(args, 'eci')) {
return callback(new Error('engine:removeChannel needs an eci string'))
}
if (!ktypes.isString(args.eci)) {
return callback(new TypeError('engine:removeChannel was given ' + ktypes.toString(args.eci) + ' instead of an eci string'))
}
core.db.getEnabledRuleset(args.rid, function(err, data){
if(err && err.notFound) return callback();
if(err) return callback(err);
var rid = data.rid;
callback(null, {
rid: rid,
src: data.src,
hash: data.hash,
url: data.url,
timestamp_stored: data.timestamp_stored,
timestamp_enable: data.timestamp_enable,
meta: {
name: _.get(core.rsreg.get(rid), ["meta", "name"]),
description: _.get(core.rsreg.get(rid), ["meta", "description"]),
author: _.get(core.rsreg.get(rid), ["meta", "author"]),
},
});
});
}),
core.db.removeChannel(args.eci, function (err) {
if (err && err.notFound) return callback(null, false)
if (err) return callback(err)
callback(null, true)
})
}),
newPico: mkKRLaction([
"parent_id",
], function(ctx, args, callback){
registerRuleset: mkKRLaction([
'url',
'base'
], function (ctx, args, callback) {
if (!_.has(args, 'url')) {
return callback(new Error('engine:registerRuleset needs a url string'))
}
if (!ktypes.isString(args.url)) {
return callback(new TypeError('engine:registerRuleset was given ' + ktypes.toString(args.url) + ' instead of a url string'))
}
var parent_id = picoArgOrCtxPico("newPico", ctx, args, "parent_id");
var uri = ktypes.isString(args.base)
? urllib.resolve(args.base, args.url)
: args.url
core.registerRulesetURL(uri, function (err, data) {
if (err) return callback(err)
callback(null, data.rid)
})
}),
core.db.assertPicoID(parent_id, function(err, parent_id){
if(err) return callback(err);
unregisterRuleset: mkKRLaction([
'rid'
], function (ctx, args, callback) {
if (!_.has(args, 'rid')) {
return callback(new Error('engine:unregisterRuleset needs a rid string or array'))
}
if (ktypes.isString(args.rid)) {
return core.unregisterRuleset(args.rid, callback)
}
if (!ktypes.isArray(args.rid)) {
return callback(new TypeError('engine:unregisterRuleset was given ' + ktypes.toString(args.rid) + ' instead of a rid string or array'))
}
core.db.newPico({
parent_id: parent_id,
}, callback);
});
}),
var rids = _.uniq(args.rid)
var i
for (i = 0; i < rids.length; i++) {
if (!ktypes.isString(rids[i])) {
return callback(new TypeError('engine:unregisterRuleset was given a rid array containing a non-string (' + ktypes.toString(rids[i]) + ')'))
}
}
removePico: mkKRLaction([
"pico_id",
], function(ctx, args, callback){
async.eachSeries(rids, core.unregisterRuleset, callback)
}),
var pico_id = picoArgOrCtxPico("removePico", ctx, args);
installRuleset: mkKRLaction([
'pico_id',
'rid',
'url',
'base'
], function (ctx, args, callback) {
var ridGiven = _.has(args, 'rid')
if (!ridGiven && !_.has(args, 'url')) {
return callback(new Error('engine:installRuleset needs either a rid string or array, or a url string'))
}
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err && err.notFound) return callback(null, false);
if(err) return callback(err);
var picoId = picoArgOrCtxPico('installRuleset', ctx, args)
core.db.listChildren(pico_id, function(err, children){
if(err) return callback(err);
if(_.size(children) > 0){
callback(new Error("Cannot remove pico \"" + pico_id + "\" because it has " + _.size(children) + " children"));
return;
}
core.db.removePico(pico_id, function(){
if(err && err.notFound) return callback(null, false);
if(err) return callback(err);
callback(null, true);
});
});
});
}),
var install = function (rid, callback) {
core.installRuleset(picoId, rid, function (err) {
callback(err, rid)
})
}
core.db.assertPicoID(picoId, function (err, picoId) {
if (err) return callback(err)
newPolicy: mkKRLaction([
"policy",
], function(ctx, args, callback){
core.db.newPolicy(args.policy, callback);
}),
if (ridGiven) {
var ridIsString = ktypes.isString(args.rid)
if (!ridIsString && !ktypes.isArray(args.rid)) {
return callback(new TypeError('engine:installRuleset was given ' + ktypes.toString(args.rid) + ' instead of a rid string or array'))
}
if (ridIsString) {
return install(args.rid, callback)
}
var rids = _.uniq(args.rid)
removePolicy: mkKRLaction([
"policy_id",
], function(ctx, args, callback){
var id = args.policy_id;
if(!_.isString(id)){
return callback(new TypeError("engine:removePolicy was given " + ktypes.toString(id) + " instead of a policy_id string"));
var i
for (i = 0; i < rids.length; i++) {
if (!ktypes.isString(rids[i])) {
return callback(new TypeError('engine:installRuleset was given a rid array containing a non-string (' + ktypes.toString(rids[i]) + ')'))
}
core.db.removePolicy(id, function(err){
if(err && err.notFound) return callback(null, false);
if(err) return callback(err);
callback(null, true);
});
}),
}
return async.mapSeries(rids, install, callback)
}
newChannel: mkKRLaction([
"pico_id",
"name",
"type",
"policy_id",
], function(ctx, args, callback){
if (!ktypes.isString(args.url)) {
return callback(new TypeError('engine:installRuleset was given ' + ktypes.toString(args.url) + ' instead of a url string'))
}
var uri = ktypes.isString(args.base)
? urllib.resolve(args.base, args.url)
: args.url
core.db.findRulesetsByURL(uri, function (err, results) {
if (err) return callback(err)
var rids = _.uniq(_.map(results, 'rid'))
if (_.size(rids) === 0) {
core.registerRulesetURL(uri, function (err, data) {
if (err) return callback(err)
install(data.rid, callback)
})
return
}
if (_.size(rids) !== 1) {
return callback(new Error('More than one rid found for the given url: ' + rids.join(' , ')))
}
install(_.head(rids), callback)
})
})
}),
var pico_id = picoArgOrCtxPico("newChannel", ctx, args);
var policy_id = ADMIN_POLICY_ID;
uninstallRuleset: mkKRLaction([
'pico_id',
'rid'
], function (ctx, args, callback) {
if (!_.has(args, 'rid')) {
return callback(new Error('engine:uninstallRuleset needs a rid string or array'))
}
if(_.has(args, "policy_id")){
if(!ktypes.isString(args.policy_id)){
throw new TypeError("engine:newChannel argument `policy_id` should be String but was " + ktypes.typeOf(args.policy_id));
}
policy_id = args.policy_id;
}
var picoId = picoArgOrCtxPico('uninstallRuleset', ctx, args)
if(!_.has(args, "name")){
return callback(new Error("engine:newChannel needs a name string"));
}
if(!_.has(args, "type")){
return callback(new Error("engine:newChannel needs a type string"));
}
var uninstall = function (rid, callback) {
core.uninstallRuleset(picoId, rid, callback)
}
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err) return callback(err);
core.db.assertPicoID(picoId, function (err, picoId) {
if (err) return callback(err)
core.db.assertPolicyID(policy_id, function(err, policy_id){
if(err) return callback(err);
var ridIsString = ktypes.isString(args.rid)
if (!ridIsString && !ktypes.isArray(args.rid)) {
return callback(new TypeError('engine:uninstallRuleset was given ' + ktypes.toString(args.rid) + ' instead of a rid string or array'))
}
if (ridIsString) {
return uninstall(args.rid, callback)
}
core.db.newChannel({
pico_id: pico_id,
name: ktypes.toString(args.name),
type: ktypes.toString(args.type),
policy_id: policy_id,
}, callback);
});
});
}),
var rids = _.uniq(args.rid)
var i
for (i = 0; i < rids.length; i++) {
if (!ktypes.isString(rids[i])) {
return callback(new TypeError('engine:uninstallRuleset was given a rid array containing a non-string (' + ktypes.toString(rids[i]) + ')'))
}
}
removeChannel: mkKRLaction([
"eci",
], function(ctx, args, callback){
async.eachSeries(rids, uninstall, callback)
})
}),
if(!_.has(args, "eci")){
return callback(new Error("engine:removeChannel needs an eci string"));
}
if(!ktypes.isString(args.eci)){
return callback(new TypeError("engine:removeChannel was given " + ktypes.toString(args.eci) + " instead of an eci string"));
}
encryptChannelMessage: mkKRLfn([
'eci',
'message',
'otherPublicKey'
], function (ctx, args, callback) {
var eci = assertArg('encryptChannelMessage', args, 'eci', 'String')
var message = assertArg('encryptChannelMessage', args, 'message', 'String')
var otherPublicKey = assertArg('encryptChannelMessage', args, 'otherPublicKey', 'String')
core.db.removeChannel(args.eci, function(err){
if(err && err.notFound)return callback(null, false);
if(err)return callback(err);
callback(null, true);
});
}),
core.db.encryptChannelMessage(eci, message, otherPublicKey, callback)
}),
decryptChannelMessage: mkKRLfn([
'eci',
'encryptedMessage',
'nonce',
'otherPublicKey'
], function (ctx, args, callback) {
var eci = assertArg('decryptChannelMessage', args, 'eci', 'String')
var encryptedMessage = assertArg('decryptChannelMessage', args, 'encryptedMessage', 'String')
var nonce = assertArg('decryptChannelMessage', args, 'nonce', 'String')
var otherPublicKey = assertArg('decryptChannelMessage', args, 'otherPublicKey', 'String')
registerRuleset: mkKRLaction([
"url",
"base",
], function(ctx, args, callback){
core.db.decryptChannelMessage(eci, encryptedMessage, nonce, otherPublicKey, callback)
}),
if(!_.has(args, "url")){
return callback(new Error("engine:registerRuleset needs a url string"));
}
if(!ktypes.isString(args.url)){
return callback(new TypeError("engine:registerRuleset was given " + ktypes.toString(args.url) + " instead of a url string"));
}
signChannelMessage: mkKRLfn([
'eci',
'message'
], function (ctx, args, callback) {
var eci = assertArg('signChannelMessage', args, 'eci', 'String')
var message = assertArg('signChannelMessage', args, 'message', 'String')
var uri = ktypes.isString(args.base)
? urllib.resolve(args.base, args.url)
: args.url;
core.registerRulesetURL(uri, function(err, data){
if(err) return callback(err);
callback(null, data.rid);
});
}),
core.db.signChannelMessage(eci, message, callback)
}),
verifySignedMessage: mkKRLfn([
'verifyKey',
'message'
], function (ctx, args, callback) {
var verifyKey = assertArg('verifySignedMessage', args, 'verifyKey', 'String')
var message = assertArg('verifySignedMessage', args, 'message', 'String')
unregisterRuleset: mkKRLaction([
"rid",
], function(ctx, args, callback){
try {
message = bs58.decode(message)
message = sovrinDID.verifySignedMessage(message, verifyKey)
if (message === false) throw new Error('failed')
} catch (e) {
callback(null, false)
return
}
if(!_.has(args, "rid")){
return callback(new Error("engine:unregisterRuleset needs a rid string or array"));
}
if(ktypes.isString(args.rid)){
return core.unregisterRuleset(args.rid, callback);
}
if(!ktypes.isArray(args.rid)){
return callback(new TypeError("engine:unregisterRuleset was given " + ktypes.toString(args.rid) + " instead of a rid string or array"));
}
callback(null, message)
})
var rids = _.uniq(args.rid);
}
var i;
for(i=0; i < rids.length; i++){
if(!ktypes.isString(rids[i])){
return callback(new TypeError("engine:unregisterRuleset was given a rid array containing a non-string (" + ktypes.toString(rids[i]) + ")"));
}
}
async.eachSeries(rids, core.unregisterRuleset, callback);
}),
installRuleset: mkKRLaction([
"pico_id",
"rid",
"url",
"base",
], function(ctx, args, callback){
var rid_given = _.has(args, "rid");
if(!rid_given && !_.has(args, "url")){
return callback(new Error("engine:installRuleset needs either a rid string or array, or a url string"));
}
var pico_id = picoArgOrCtxPico("installRuleset", ctx, args);
var install = function(rid, callback){
core.installRuleset(pico_id, rid, function(err){
callback(err, rid);
});
};
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err) return callback(err);
if(rid_given){
var ridIsString = ktypes.isString(args.rid);
if(!ridIsString && !ktypes.isArray(args.rid)){
return callback(new TypeError("engine:installRuleset was given " + ktypes.toString(args.rid) + " instead of a rid string or array"));
}
if(ridIsString){
return install(args.rid, callback);
}
var rids = _.uniq(args.rid);
var i;
for(i=0; i < rids.length; i++){
if(!ktypes.isString(rids[i])){
return callback(new TypeError("engine:installRuleset was given a rid array containing a non-string (" + ktypes.toString(rids[i]) + ")"));
}
}
return async.mapSeries(rids, install, callback);
}
if(!ktypes.isString(args.url)){
return callback(new TypeError("engine:installRuleset was given " + ktypes.toString(args.url) + " instead of a url string"));
}
var uri = ktypes.isString(args.base)
? urllib.resolve(args.base, args.url)
: args.url;
core.db.findRulesetsByURL(uri, function(err, results){
if(err) return callback(err);
var rids = _.uniq(_.map(results, "rid"));
if(_.size(rids) === 0){
core.registerRulesetURL(uri, function(err, data){
if(err) return callback(err);
install(data.rid, callback);
});
return;
}
if(_.size(rids) !== 1){
return callback(new Error("More than one rid found for the given url: " + rids.join(" , ")));
}
install(_.head(rids), callback);
});
});
}),
uninstallRuleset: mkKRLaction([
"pico_id",
"rid",
], function(ctx, args, callback){
if(!_.has(args, "rid")){
return callback(new Error("engine:uninstallRuleset needs a rid string or array"));
}
var pico_id = picoArgOrCtxPico("uninstallRuleset", ctx, args);
var uninstall = function(rid, callback){
core.uninstallRuleset(pico_id, rid, callback);
};
core.db.assertPicoID(pico_id, function(err, pico_id){
if(err) return callback(err);
var ridIsString = ktypes.isString(args.rid);
if(!ridIsString && !ktypes.isArray(args.rid)){
return callback(new TypeError("engine:uninstallRuleset was given " + ktypes.toString(args.rid) + " instead of a rid string or array"));
}
if(ridIsString){
return uninstall(args.rid, callback);
}
var rids = _.uniq(args.rid);
var i;
for(i=0; i < rids.length; i++){
if(!ktypes.isString(rids[i])){
return callback(new TypeError("engine:uninstallRuleset was given a rid array containing a non-string (" + ktypes.toString(rids[i]) + ")"));
}
}
async.eachSeries(rids, uninstall, callback);
});
}),
encryptChannelMessage: mkKRLfn([
"eci",
"message",
"otherPublicKey"
], function(ctx, args, callback){
var eci = assertArg("encryptChannelMessage", args, "eci", "String");
var message = assertArg("encryptChannelMessage", args, "message", "String");
var otherPublicKey = assertArg("encryptChannelMessage", args, "otherPublicKey", "String");
core.db.encryptChannelMessage(eci, message, otherPublicKey, callback);
}),
decryptChannelMessage: mkKRLfn([
"eci",
"encryptedMessage",
"nonce",
"otherPublicKey"
], function(ctx, args, callback){
var eci = assertArg("decryptChannelMessage", args, "eci", "String");
var encryptedMessage = assertArg("decryptChannelMessage", args, "encryptedMessage", "String");
var nonce = assertArg("decryptChannelMessage", args, "nonce", "String");
var otherPublicKey = assertArg("decryptChannelMessage", args, "otherPublicKey", "String");
core.db.decryptChannelMessage(eci, encryptedMessage, nonce, otherPublicKey, callback);
}),
signChannelMessage: mkKRLfn([
"eci",
"message",
], function(ctx, args, callback){
var eci = assertArg("signChannelMessage", args, "eci", "String");
var message = assertArg("signChannelMessage", args, "message", "String");
core.db.signChannelMessage(eci, message, callback);
}),
verifySignedMessage: mkKRLfn([
"verifyKey",
"message",
], function(ctx, args, callback){
var verifyKey = assertArg("verifySignedMessage", args, "verifyKey", "String");
var message = assertArg("verifySignedMessage", args, "message", "String");
try{
message = bs58.decode(message);
message = sovrinDID.verifySignedMessage(message, verifyKey);
if(message === false) throw "failed";
}catch(e){
callback(null, false);
return;
}
callback(null, message);
}),
};
return {
def: fns,
};
};
return {
def: fns
}
}

@@ -1,729 +0,711 @@

var _ = require("lodash");
var test = require("tape");
var cocb = require("co-callback");
var ktypes = require("krl-stdlib/types");
var strictDeepEquals = require("krl-stdlib/src/strictEquals").strictDeepEquals;
var kengine = require("./engine");
var ADMIN_POLICY_ID = require("../DB").ADMIN_POLICY_ID;
var mkTestPicoEngine = require("../mkTestPicoEngine");
var _ = require('lodash')
var test = require('tape')
var util = require('util')
var ktypes = require('krl-stdlib/types')
var strictDeepEquals = require('../../test/helpers/strictEquals').strictDeepEquals
var kengine = require('./engine')
var ADMIN_POLICY_ID = require('../DB').ADMIN_POLICY_ID
var mkTestPicoEngine = require('../mkTestPicoEngine')
// wrap stubbed functions in this to simulate async
var tick = function (fn) {
  // Defer fn to the next turn of the event loop, forwarding whatever
  // arguments the returned wrapper receives (simulates an async stub).
  return function () {
    var captured = Array.prototype.slice.call(arguments)
    process.nextTick(function () {
      fn.apply(null, captured)
    })
  }
}
// wrap stubbed functions in this to simulate async
var tick = function(fn){
    // Returns a wrapper that replays its arguments to fn on the next
    // event-loop turn via process.nextTick.
    return function(){
        var captured = Array.prototype.slice.call(arguments);
        process.nextTick(function(){
            fn.apply(null, captured);
        });
    };
};
async function runAction (pe, ctx, domain, id, args) {
  // Look up the action `domain:id` in the engine's module registry,
  // invoke it, and return the first element of its result array.
  var action = await pe.modules.get(ctx, domain, id)
  var returns = await action(ctx, args)
  return _.head(returns)
}
// Look up the action `domain:id` in the engine's module registry, invoke
// it, and resolve with the first element of its result array.
var runAction = cocb.wrap(function*(pe, ctx, domain, id, args){
    var action = yield pe.modules.get(ctx, domain, id);
    var returns = yield action(ctx, args);
    return _.head(returns);
});
var testPE = function (testName, genfn) {
  // Register a tape test backed by a fresh test pico-engine rooted at
  // io.picolabs.engine; genfn is an async (t, pe) test body whose
  // resolution or rejection ends the test.
  test(testName, function (t) {
    mkTestPicoEngine({
      rootRIDs: ['io.picolabs.engine']
    }, function (err, pe) {
      if (err) {
        t.end(err)
        return
      }
      var run = async function () {
        await genfn(t, pe)
      }
      run().then(t.end).catch(t.end)
    })
  })
}
var testPE = function(test_name, genfn){
test(test_name, function(t){
mkTestPicoEngine({
rootRIDs: ["io.picolabs.engine"],
}, function(err, pe){
if(err) return t.end(err);
async function testError (t, promise, errMsg, msg) {
  // Assert that `promise` rejects and that the rejection stringifies to
  // errMsg; a resolution is reported as a failure. `msg` labels the
  // tape assertion.
  try {
    await promise
    t.fail('should fail', msg)
  } catch (e) {
    // e + '' invokes Error#toString, e.g. "TypeError: ...".
    t.equals(e + '', errMsg, msg)
  }
}
cocb.run(function*(){
yield genfn(t, pe);
}, t.end);
});
});
};
var assertPicoID = function (id, callback) {
  // Stub of DB.assertPicoID for tests: any string is a valid pico_id;
  // anything else yields a TypeError through the callback.
  if (ktypes.isString(id)) {
    callback(null, id)
    return
  }
  return callback(new TypeError('Invalid pico_id: ' + ktypes.toString(id)))
}
// Assert that `promise` rejects and that the rejection, stringified,
// equals `errMsg`; a resolution is reported as a failure. `msg` labels
// the tape assertions.
var testError = cocb.wrap(function*(t, promise, errMsg, msg){
    try{
        yield promise;
        // Reaching this line means the promise resolved, which the
        // caller did not expect.
        t.fail("should fail", msg);
    }catch(err){
        // err + "" invokes Error#toString, e.g. "TypeError: ...".
        t.equals(err + "", errMsg, msg);
    }
});
testPE('engine:getPicoIDByECI', async function (t, pe) {
var tstErr = _.partial(testError, t)
// Stub of DB.assertPicoID for tests: any string is a valid pico_id;
// anything else yields a TypeError through the callback.
var assertPicoID = function(id, callback){
    if(ktypes.isString(id)){
        callback(null, id);
        return;
    }
    return callback(new TypeError("Invalid pico_id: " + ktypes.toString(id)));
};
var getPicoIDByECI = await pe.modules.get({}, 'engine', 'getPicoIDByECI')
var get = function () {
return getPicoIDByECI({}, _.toArray(arguments))
}
t.equals(await get('id1'), 'id0')
testPE("engine:getPicoIDByECI", function*(t, pe){
var tstErr = _.partial(testError, t);
await tstErr(
get(),
'Error: engine:getPicoIDByECI needs an eci string',
'no eci is given'
)
await tstErr(
get(null),
'TypeError: engine:getPicoIDByECI was given null instead of an eci string',
'wrong eci type'
)
t.equals(await get('quux'), void 0, 'eci not found')
})
var getPicoIDByECI = yield pe.modules.get({}, "engine", "getPicoIDByECI");
var get = function(){
return getPicoIDByECI({}, _.toArray(arguments));
};
test('engine:registerRuleset', function (t) {
(async function () {
var tstErr = _.partial(testError, t)
t.equals(yield get("id1"), "id0");
var engine = kengine({
registerRulesetURL: tick(function (url, callback) {
callback(null, {
rid: 'rid for: ' + url
})
})
})
yield tstErr(
get(),
"Error: engine:getPicoIDByECI needs an eci string",
"no eci is given"
);
yield tstErr(
get(null),
"TypeError: engine:getPicoIDByECI was given null instead of an eci string",
"wrong eci type"
);
t.equals(yield get("quux"), void 0, "eci not found");
});
t.equals((await engine.def.registerRuleset({}, {
url: 'http://foo.bar/qux.krl'
}))[0], 'rid for: http://foo.bar/qux.krl')
t.equals((await engine.def.registerRuleset({}, {
url: 'qux.krl',
base: 'https://foo.bar/baz/'
}))[0], 'rid for: https://foo.bar/baz/qux.krl')
test("engine:registerRuleset", function(t){
cocb.run(function*(){
var tstErr = _.partial(testError, t);
await tstErr(
engine.def.registerRuleset({}, []),
'Error: engine:registerRuleset needs a url string',
'no url is given'
)
var engine = kengine({
registerRulesetURL: tick(function(url, callback){
callback(null, {
rid: "rid for: " + url
});
})
});
await tstErr(
engine.def.registerRuleset({}, [_.noop]),
'TypeError: engine:registerRuleset was given [Function] instead of a url string',
'wrong url type'
)
}()).then(t.end).catch(t.end)
})
t.equals((yield engine.def.registerRuleset({}, {
url: "http://foo.bar/qux.krl",
}))[0], "rid for: http://foo.bar/qux.krl");
test('engine:installRuleset', function (t) {
(async function () {
var tstErr = _.partial(testError, t)
t.equals((yield engine.def.registerRuleset({}, {
url: "qux.krl",
base: "https://foo.bar/baz/",
}))[0], "rid for: https://foo.bar/baz/qux.krl");
var engine = kengine({
installRuleset: tick(function (picoId, rid, callback) {
callback()
}),
registerRulesetURL: tick(function (url, callback) {
callback(null, {
rid: 'REG:' + /\/([^/]*)\.krl$/.exec(url)[1]
})
}),
db: {
assertPicoID: assertPicoID,
findRulesetsByURL: tick(function (url, callback) {
if (url === 'http://foo.bar/baz/qux.krl') {
return callback(null, [{rid: 'found'}])
} else if (url === 'file:///too/many.krl') {
return callback(null, [{rid: 'a'}, {rid: 'b'}, {rid: 'c'}])
}
callback(null, [])
})
}
})
yield tstErr(
engine.def.registerRuleset({}, []),
"Error: engine:registerRuleset needs a url string",
"no url is given"
);
var inst = async function (id, rid, url, base) {
var args = {}
if (id !== void 0) {
args.pico_id = id
}
if (rid !== void 0) {
args.rid = rid
}
if (url !== void 0) {
args.url = url
}
if (base !== void 0) {
args.base = base
}
return (await engine.def.installRuleset({}, args))[0]
}
yield tstErr(
engine.def.registerRuleset({}, [_.noop]),
"TypeError: engine:registerRuleset was given [Function] instead of a url string",
"wrong url type"
);
t.equals(await inst('pico0', 'foo.bar'), 'foo.bar')
t.deepEquals(await inst('pico0', ['foo.bar', 'foo.qux']), ['foo.bar', 'foo.qux'])
strictDeepEquals(t, await inst('pico0', []), [])
t.deepEquals(await inst('pico0', void 0, 'file:///foo/bar.krl'), 'REG:bar')
t.deepEquals(await inst('pico0', void 0, 'qux.krl', 'http://foo.bar/baz/'), 'found')
}, t.end);
});
await tstErr(
inst('pico0', void 0, 'file:///too/many.krl'),
'Error: More than one rid found for the given url: a , b , c',
'too many matched'
)
}()).then(t.end).catch(t.end)
})
test("engine:installRuleset", function(t){
cocb.run(function*(){
var tstErr = _.partial(testError, t);
test('engine:uninstallRuleset', function (t) {
(async function () {
var uninstalled = {}
var order = 0
var engine = kengine({
installRuleset: tick(function(pico_id, rid, callback){
callback();
}),
registerRulesetURL: tick(function(url, callback){
callback(null, {
rid: "REG:" + /\/([^/]*)\.krl$/.exec(url)[1]
});
}),
db: {
assertPicoID: assertPicoID,
findRulesetsByURL: tick(function(url, callback){
if(url === "http://foo.bar/baz/qux.krl"){
return callback(null, [{rid: "found"}]);
}else if(url === "file:///too/many.krl"){
return callback(null, [{rid: "a"}, {rid: "b"}, {rid: "c"}]);
}
callback(null, []);
}),
}
});
var engine = kengine({
uninstallRuleset: tick(function (id, rid, callback) {
if (id !== 'pico0') {
return callback(new Error('invalid pico_id'))
}
if (!_.isString(rid)) {
return callback(new Error('invalid rid'))
}
_.set(uninstalled, [id, rid], order++)
callback()
}),
db: {
assertPicoID: assertPicoID
}
})
var inst = cocb.wrap(function*(id, rid, url, base){
var args = {};
if(id !== void 0){
args.pico_id = id;
}
if(rid !== void 0){
args.rid = rid;
}
if(url !== void 0){
args.url = url;
}
if(base !== void 0){
args.base = base;
}
return (yield engine.def.installRuleset({}, args))[0];
});
t.equals((await engine.def.uninstallRuleset({}, {
pico_id: 'pico0',
rid: 'foo.bar'
}))[0], void 0)
t.equals(yield inst("pico0", "foo.bar"), "foo.bar");
t.deepEquals(yield inst("pico0", ["foo.bar", "foo.qux"]), ["foo.bar", "foo.qux"]);
strictDeepEquals(t, yield inst("pico0", []), []);
t.deepEquals(yield inst("pico0", void 0, "file:///foo/bar.krl"), "REG:bar");
t.deepEquals(yield inst("pico0", void 0, "qux.krl", "http://foo.bar/baz/"), "found");
t.equals((await engine.def.uninstallRuleset({}, {
pico_id: 'pico0',
rid: ['baz', 'qux']
}))[0], void 0)
yield tstErr(
inst("pico0", void 0, "file:///too/many.krl"),
"Error: More than one rid found for the given url: a , b , c",
"too many matched"
);
t.deepEquals(uninstalled, {
pico0: {
'foo.bar': 0,
'baz': 1,
'qux': 2
}
})
}()).then(t.end).catch(t.end)
})
}, t.end);
});
test('engine:unregisterRuleset', function (t) {
(async function () {
var tstErr = _.partial(testError, t)
test("engine:uninstallRuleset", function(t){
cocb.run(function*(){
var log = []
var engine = kengine({
unregisterRuleset: tick(function (rid, callback) {
if (!_.isString(rid)) {
return callback(new Error('invalid rid'))
}
log.push(rid)
callback()
})
})
var uninstalled = {};
var order = 0;
t.equals((await engine.def.unregisterRuleset({}, {
rid: 'foo.bar'
}))[0], void 0)
var engine = kengine({
uninstallRuleset: tick(function(id, rid, callback){
if(id !== "pico0"){
return callback(new Error("invalid pico_id"));
}
if(!_.isString(rid)){
return callback(new Error("invalid rid"));
}
_.set(uninstalled, [id, rid], order++);
callback();
}),
db: {
assertPicoID: assertPicoID,
}
});
t.equals((await engine.def.unregisterRuleset({}, {
rid: ['baz', 'qux']
}))[0], void 0)
t.equals((yield engine.def.uninstallRuleset({}, {
pico_id: "pico0",
rid: "foo.bar",
}))[0], void 0);
await tstErr(
engine.def.unregisterRuleset({}, []),
'Error: engine:unregisterRuleset needs a rid string or array'
)
t.equals((yield engine.def.uninstallRuleset({}, {
pico_id: "pico0",
rid: ["baz", "qux"],
}))[0], void 0);
await tstErr(
engine.def.unregisterRuleset({}, {rid: {}}),
'TypeError: engine:unregisterRuleset was given [Map] instead of a rid string or array'
)
t.deepEquals(uninstalled, {
pico0: {
"foo.bar": 0,
"baz": 1,
"qux": 2,
}
});
await tstErr(
engine.def.unregisterRuleset({}, {
rid: ['baz', 2, 'qux']
}),
'TypeError: engine:unregisterRuleset was given a rid array containing a non-string (2)'
)
}, t.end);
});
t.deepEquals(log, [
'foo.bar',
'baz',
'qux'
])
}()).then(t.end).catch(t.end)
})
test("engine:unregisterRuleset", function(t){
cocb.run(function*(){
var tstErr = _.partial(testError, t);
testPE('engine:describeRuleset', async function (t, pe) {
var tstErr = _.partial(testError, t)
var log = [];
var engine = kengine({
unregisterRuleset: tick(function(rid, callback){
if(!_.isString(rid)){
return callback("invalid rid");
}
log.push(rid);
callback();
}),
});
var ctx = {}
var descRID = await pe.modules.get(ctx, 'engine', 'describeRuleset')
t.equals((yield engine.def.unregisterRuleset({}, {
rid: "foo.bar",
}))[0], void 0);
var desc = await descRID(ctx, {rid: 'io.picolabs.hello_world'})
t.equals((yield engine.def.unregisterRuleset({}, {
rid: ["baz", "qux"],
}))[0], void 0);
var isIsoString = function (str) {
return str === (new Date(str)).toISOString()
}
yield tstErr(
engine.def.unregisterRuleset({}, []),
"Error: engine:unregisterRuleset needs a rid string or array"
);
t.deepEquals(_.keys(desc), [
'rid',
'src',
'hash',
'url',
'timestamp_stored',
'timestamp_enable',
'meta'
])
t.equals(desc.rid, 'io.picolabs.hello_world')
t.ok(_.isString(desc.src))
t.ok(_.isString(desc.hash))
t.ok(_.isString(desc.url))
t.ok(isIsoString(desc.timestamp_stored))
t.ok(isIsoString(desc.timestamp_enable))
t.deepEquals(desc.meta, {
name: 'Hello World',
description: '\nA first ruleset for the Quickstart\n ',
author: 'Phil Windley'
})
yield tstErr(
engine.def.unregisterRuleset({}, {rid: {},}),
"TypeError: engine:unregisterRuleset was given [Map] instead of a rid string or array"
);
await tstErr(
descRID(ctx, []),
'Error: engine:describeRuleset needs a rid string',
'no rid is given'
)
await tstErr(
descRID(ctx, [[]]),
'TypeError: engine:describeRuleset was given [Array] instead of a rid string',
'wrong rid type'
)
yield tstErr(
engine.def.unregisterRuleset({}, {
rid: ["baz", 2, "qux"],
}),
"TypeError: engine:unregisterRuleset was given a rid array containing a non-string (2)"
);
t.equals(await descRID(ctx, {rid: 'not.found'}), void 0)
})
t.deepEquals(log, [
"foo.bar",
"baz",
"qux",
]);
testPE('engine:listAllEnabledRIDs', async function (t, pe) {
var listAllEnabledRIDs = await pe.modules.get({}, 'engine', 'listAllEnabledRIDs')
var rids = await listAllEnabledRIDs({}, [])
t.ok(rids.length > 1, 'should be all the test-rulesets/')
t.ok(_.every(rids, _.isString))
t.ok(_.includes(rids, 'io.picolabs.engine'))
})
}, t.end);
});
testPE('engine:newPico', async function (t, pe) {
var action = function (ctx, name, args) {
return runAction(pe, ctx, 'engine', name, args)
}
testPE("engine:describeRuleset", function * (t, pe){
var tstErr = _.partial(testError, t);
var pico2 = await action({}, 'newPico', {
parent_id: 'id0'
})
t.deepEquals(pico2, {
id: 'id2',
parent_id: 'id0',
admin_eci: 'id3'
})
var ctx = {};
var descRID = yield pe.modules.get(ctx, "engine", "describeRuleset");
// default to ctx.pico_id
var pico3 = await action({
pico_id: 'id2' // called by pico2
}, 'newPico', {})
t.deepEquals(pico3, {
id: 'id4',
parent_id: 'id2',
admin_eci: 'id5'
})
})
var desc = yield descRID(ctx, {rid: "io.picolabs.hello_world"});
testPE('engine:getParent, engine:getAdminECI, engine:listChildren, engine:removePico', async function (t, pe) {
var tstErr = _.partial(testError, t)
var isIsoString = function(str){
return str === (new Date(str)).toISOString();
};
var newPico = function (ctx, args) {
return runAction(pe, ctx, 'engine', 'newPico', args)
}
var removePico = function (ctx, args) {
return runAction(pe, ctx, 'engine', 'removePico', args)
}
t.deepEquals(_.keys(desc), [
"rid",
"src",
"hash",
"url",
"timestamp_stored",
"timestamp_enable",
"meta",
]);
t.equals(desc.rid, "io.picolabs.hello_world");
t.ok(_.isString(desc.src));
t.ok(_.isString(desc.hash));
t.ok(_.isString(desc.url));
t.ok(isIsoString(desc.timestamp_stored));
t.ok(isIsoString(desc.timestamp_enable));
t.deepEquals(desc.meta, {
name: "Hello World",
description: "\nA first ruleset for the Quickstart\n ",
author: "Phil Windley",
});
var getParent = await pe.modules.get({}, 'engine', 'getParent')
var getAdminECI = await pe.modules.get({}, 'engine', 'getAdminECI')
var listChildren = await pe.modules.get({}, 'engine', 'listChildren')
yield tstErr(
descRID(ctx, []),
"Error: engine:describeRuleset needs a rid string",
"no rid is given"
);
yield tstErr(
descRID(ctx, [[]]),
"TypeError: engine:describeRuleset was given [Array] instead of a rid string",
"wrong rid type"
);
await newPico({pico_id: 'id0'}, [])// id2
await newPico({}, ['id0'])// id4
await newPico({pico_id: 'id2'}, [])// id6
t.equals(yield descRID(ctx, {rid: "not.found"}), void 0);
});
t.equals(await getParent({}, ['id0']), null)
t.equals(await getParent({}, ['id2']), 'id0')
t.equals(await getParent({}, ['id4']), 'id0')
t.equals(await getParent({}, ['id6']), 'id2')
t.equals(await getAdminECI({}, ['id0']), 'id1')
t.equals(await getAdminECI({}, ['id2']), 'id3')
t.equals(await getAdminECI({}, ['id4']), 'id5')
t.equals(await getAdminECI({}, ['id6']), 'id7')
testPE("engine:listAllEnabledRIDs", function * (t, pe){
var listAllEnabledRIDs = yield pe.modules.get({}, "engine", "listAllEnabledRIDs");
var rids = yield listAllEnabledRIDs({}, []);
t.ok(rids.length > 1, "should be all the test-rulesets/");
t.ok(_.every(rids, _.isString));
t.ok(_.includes(rids, "io.picolabs.engine"));
});
t.deepEquals(await listChildren({}, ['id0']), ['id2', 'id4'])
t.deepEquals(await listChildren({}, ['id2']), ['id6'])
strictDeepEquals(t, await listChildren({}, ['id4']), [])
strictDeepEquals(t, await listChildren({}, ['id6']), [])
// fallback on ctx.pico_id
t.equals(await getParent({pico_id: 'id6'}, []), 'id2')
t.equals(await getAdminECI({pico_id: 'id6'}, []), 'id7')
t.deepEquals(await listChildren({pico_id: 'id2'}, []), ['id6'])
t.equals(await removePico({pico_id: 'id6'}, []), true)
t.equals(await removePico({pico_id: 'id6'}, []), false)
strictDeepEquals(t, await listChildren({}, ['id2']), [])
testPE("engine:newPico", function * (t, pe){
var action = function(ctx, name, args){
return runAction(pe, ctx, "engine", name, args);
};
// report error on invalid pico_id
var assertInvalidPicoID = function (genfn, id, expected) {
return tstErr(genfn({pico_id: id}, []), expected)
}
var pico2 = yield action({}, "newPico", {
parent_id: "id0",
});
t.deepEquals(pico2, {
id: "id2",
parent_id: "id0",
admin_eci: "id3",
});
await assertInvalidPicoID(getParent, void 0, 'TypeError: engine:getParent was given null instead of a pico_id string')
await assertInvalidPicoID(getAdminECI, void 0, 'TypeError: engine:getAdminECI was given null instead of a pico_id string')
await assertInvalidPicoID(listChildren, void 0, 'TypeError: engine:listChildren was given null instead of a pico_id string')
await assertInvalidPicoID(newPico, void 0, 'TypeError: engine:newPico was given null instead of a parent_id string')
await assertInvalidPicoID(removePico, void 0, 'TypeError: engine:removePico was given null instead of a pico_id string')
//default to ctx.pico_id
var pico3 = yield action({
pico_id: "id2",//called by pico2
}, "newPico", {});
t.deepEquals(pico3, {
id: "id4",
parent_id: "id2",
admin_eci: "id5",
});
});
t.equals(await getAdminECI({}, ['id404']), void 0)
t.equals(await getParent({pico_id: 'id404'}, []), void 0)
t.equals(await listChildren({pico_id: 'id404'}, []), void 0)
await assertInvalidPicoID(newPico, 'id404', 'NotFoundError: Pico not found: id404')
t.equals(await removePico({}, ['id404']), false)
await tstErr(
removePico({}, ['id0']),
'Error: Cannot remove pico "id0" because it has 2 children',
"you can't remove a pico with children"
)
})
testPE("engine:getParent, engine:getAdminECI, engine:listChildren, engine:removePico", function * (t, pe){
var tstErr = _.partial(testError, t);
testPE('engine:newPolicy, engine:listPolicies, engine:removePolicy', async function (t, pe) {
var tstErr = _.partial(testError, t)
var newPico = function(ctx, args){
return runAction(pe, ctx, "engine", "newPico", args);
};
var removePico = function(ctx, args){
return runAction(pe, ctx, "engine", "removePico", args);
};
var newPolicy = function (policy) {
return runAction(pe, {}, 'engine', 'newPolicy', [policy])
}
var listPolicies = await pe.modules.get({}, 'engine', 'listPolicies')
var removePolicy = function (id) {
return runAction(pe, {}, 'engine', 'removePolicy', [id])
}
var getParent = yield pe.modules.get({}, "engine", "getParent");
var getAdminECI = yield pe.modules.get({}, "engine", "getAdminECI");
var listChildren = yield pe.modules.get({}, "engine", "listChildren");
// Making sure ChannelPolicy.clean is on
await tstErr(newPolicy(), 'TypeError: Policy definition should be a Map, but was Null')
await tstErr(newPolicy({name: 1}), 'Error: missing `policy.name`')
yield newPico({pico_id: "id0"}, []);// id2
yield newPico({}, ["id0"]);// id4
yield newPico({pico_id: "id2"}, []);// id6
var pAdmin = {
id: ADMIN_POLICY_ID,
name: 'admin channel policy',
event: {allow: [{}]},
query: {allow: [{}]}
}
t.equals(yield getParent({}, ["id0"]), null);
t.equals(yield getParent({}, ["id2"]), "id0");
t.equals(yield getParent({}, ["id4"]), "id0");
t.equals(yield getParent({}, ["id6"]), "id2");
t.deepEquals(await listPolicies(), [pAdmin])
t.equals(yield getAdminECI({}, ["id0"]), "id1");
t.equals(yield getAdminECI({}, ["id2"]), "id3");
t.equals(yield getAdminECI({}, ["id4"]), "id5");
t.equals(yield getAdminECI({}, ["id6"]), "id7");
var pFoo = await newPolicy({name: 'foo'})
t.deepEquals(pFoo, {
id: 'id2',
name: 'foo',
event: {deny: [], allow: []},
query: {deny: [], allow: []}
})
t.deepEquals(yield listChildren({}, ["id0"]), ["id2", "id4"]);
t.deepEquals(yield listChildren({}, ["id2"]), ["id6"]);
strictDeepEquals(t, yield listChildren({}, ["id4"]), []);
strictDeepEquals(t, yield listChildren({}, ["id6"]), []);
t.deepEquals(await listPolicies(), [pAdmin, pFoo])
//fallback on ctx.pico_id
t.equals(yield getParent({pico_id: "id6"}, []), "id2");
t.equals(yield getAdminECI({pico_id: "id6"}, []), "id7");
t.deepEquals(yield listChildren({pico_id: "id2"}, []), ["id6"]);
t.equals(yield removePico({pico_id: "id6"}, []), true);
t.equals(yield removePico({pico_id: "id6"}, []), false);
strictDeepEquals(t, yield listChildren({}, ["id2"]), []);
var pBar = await newPolicy({
name: 'bar',
event: {allow: [{domain: 'system'}]}
})
t.deepEquals(pBar, {
id: 'id3',
name: 'bar',
event: {deny: [], allow: [{domain: 'system'}]},
query: {deny: [], allow: []}
})
//report error on invalid pico_id
var assertInvalidPicoID = function(genfn, id, expected){
return tstErr(genfn({pico_id: id}, []), expected);
};
t.deepEquals(await listPolicies(), [pAdmin, pFoo, pBar])
yield assertInvalidPicoID(getParent , void 0, "TypeError: engine:getParent was given null instead of a pico_id string");
yield assertInvalidPicoID(getAdminECI , void 0, "TypeError: engine:getAdminECI was given null instead of a pico_id string");
yield assertInvalidPicoID(listChildren, void 0, "TypeError: engine:listChildren was given null instead of a pico_id string");
yield assertInvalidPicoID(newPico , void 0, "TypeError: engine:newPico was given null instead of a parent_id string");
yield assertInvalidPicoID(removePico , void 0, "TypeError: engine:removePico was given null instead of a pico_id string");
await tstErr(removePolicy(), 'TypeError: engine:removePolicy was given null instead of a policy_id string')
t.equals(await removePolicy('id404'), false)
t.equals(yield getAdminECI({}, ["id404"]), void 0);
t.equals(yield getParent({pico_id: "id404"}, []), void 0);
t.equals(yield listChildren({pico_id: "id404"}, []), void 0);
yield assertInvalidPicoID(newPico , "id404", "NotFoundError: Pico not found: id404");
t.equals(yield removePico({}, ["id404"]), false);
t.equals(await removePolicy(pFoo.id), true)
t.equals(await removePolicy(pFoo.id), false)
t.deepEquals(await listPolicies(), [pAdmin, pBar])
yield tstErr(
removePico({}, ["id0"]),
"Error: Cannot remove pico \"id0\" because it has 2 children",
"you can't remove a pico with children"
);
});
await tstErr(removePolicy(pAdmin.id), 'Error: Policy ' + pAdmin.id + ' is in use, cannot remove.')
t.equals(await removePolicy(pBar.id), true)
t.deepEquals(await listPolicies(), [pAdmin])
})
testPE("engine:newPolicy, engine:listPolicies, engine:removePolicy", function * (t, pe){
var tstErr = _.partial(testError, t);
testPE('engine:newChannel, engine:listChannels, engine:removeChannel', async function (t, pe) {
var tstErr = _.partial(testError, t)
var newPolicy = function(policy){
return runAction(pe, {}, "engine", "newPolicy", [policy]);
};
var listPolicies = yield pe.modules.get({}, "engine", "listPolicies");
var removePolicy = function(id){
return runAction(pe, {}, "engine", "removePolicy", [id]);
};
var newPolicy = function (policy) {
return runAction(pe, {}, 'engine', 'newPolicy', [policy])
}
var newChannel = function (ctx, args) {
return runAction(pe, ctx, 'engine', 'newChannel', args)
}
var removeChannel = function (ctx, args) {
return runAction(pe, ctx, 'engine', 'removeChannel', args)
}
var listChannels = await pe.modules.get({}, 'engine', 'listChannels')
// Making sure ChannelPolicy.clean is on
yield tstErr(newPolicy(), "TypeError: Policy definition should be a Map, but was Null");
yield tstErr(newPolicy({name: 1}), "Error: missing `policy.name`");
var mkChan = function (picoId, eci, name, type, policyId) {
return {
pico_id: picoId,
id: eci,
name: name,
type: type,
policy_id: policyId || ADMIN_POLICY_ID,
sovrin: {
did: eci,
verifyKey: 'verifyKey_' + eci
}
}
}
var pAdmin = {
id: ADMIN_POLICY_ID,
name: "admin channel policy",
event: {allow: [{}]},
query: {allow: [{}]},
};
t.deepEquals(await listChannels({}, ['id0']), [
mkChan('id0', 'id1', 'admin', 'secret')
])
t.deepEquals(yield listPolicies(), [pAdmin]);
t.deepEquals(await newChannel({}, ['id0', 'a', 'b']), mkChan('id0', 'id2', 'a', 'b'))
t.deepEquals(await listChannels({}, ['id0']), [
mkChan('id0', 'id1', 'admin', 'secret'),
mkChan('id0', 'id2', 'a', 'b')
])
var pFoo = yield newPolicy({name: "foo"});
t.deepEquals(pFoo, {
id: "id2",
name: "foo",
event: {deny: [], allow: []},
query: {deny: [], allow: []},
});
await tstErr(
newChannel({}, ['id1']),
'Error: engine:newChannel needs a name string',
'no name is given'
)
await tstErr(
newChannel({}, ['id1', 'id1']),
'Error: engine:newChannel needs a type string',
'no type is given'
)
t.deepEquals(yield listPolicies(), [pAdmin, pFoo]);
await tstErr(
removeChannel({}, ['id1']),
"Error: Cannot delete the pico's admin channel",
"removeChannel shouldn't remove the admin channel"
)
await tstErr(
removeChannel({}, []),
'Error: engine:removeChannel needs an eci string',
'no eci is given'
)
await tstErr(
removeChannel({}, [/id1/]),
'TypeError: engine:removeChannel was given re#id1# instead of an eci string',
'wrong eci type'
)
t.equals(await removeChannel({}, ['eci404']), false)
var pBar = yield newPolicy({
name: "bar",
event: {allow: [{domain: "system"}]}
});
t.deepEquals(pBar, {
id: "id3",
name: "bar",
event: {deny: [], allow: [{domain: "system"}]},
query: {deny: [], allow: []},
});
t.equals(await removeChannel({}, ['id2']), true)
t.equals(await removeChannel({}, ['id2']), false)
t.deepEquals(await listChannels({}, ['id0']), [
mkChan('id0', 'id1', 'admin', 'secret')
])
t.deepEquals(yield listPolicies(), [pAdmin, pFoo, pBar]);
// fallback on ctx.pico_id
t.deepEquals(await listChannels({pico_id: 'id0'}, []), [
mkChan('id0', 'id1', 'admin', 'secret')
])
t.deepEquals(await newChannel({pico_id: 'id0'}, {'name': 'a', 'type': 'b'}), mkChan('id0', 'id3', 'a', 'b'))
yield tstErr(removePolicy(), "TypeError: engine:removePolicy was given null instead of a policy_id string");
t.equals(yield removePolicy("id404"), false);
// report error on invalid pico_id
var assertInvalidPicoID = function (genfn, id, expected) {
return tstErr(genfn({pico_id: id}, {'name': 'a', 'type': 'b'}), expected)
}
t.equals(yield removePolicy(pFoo.id), true);
t.equals(yield removePolicy(pFoo.id), false);
t.deepEquals(yield listPolicies(), [pAdmin, pBar]);
await assertInvalidPicoID(newChannel, void 0, 'TypeError: engine:newChannel was given null instead of a pico_id string')
await assertInvalidPicoID(listChannels, void 0, 'TypeError: engine:listChannels was given null instead of a pico_id string')
yield tstErr(removePolicy(pAdmin.id), "Error: Policy " + pAdmin.id + " is in use, cannot remove.");
await assertInvalidPicoID(newChannel, 'id404', 'NotFoundError: Pico not found: id404')
t.deepEquals(await listChannels({}, ['id404']), void 0)
t.equals(yield removePolicy(pBar.id), true);
t.deepEquals(yield listPolicies(), [pAdmin]);
});
// setting policy_id on a newChannel
tstErr(newChannel({}, ['id0', 'a', 'b', 100]), 'TypeError: engine:newChannel argument `policy_id` should be String but was Number')
tstErr(newChannel({}, ['id0', 'a', 'b', 'id404']), 'NotFoundError: Policy not found: id404')
var pFoo = await newPolicy({name: 'foo'})
t.deepEquals(await newChannel({}, ['id0', 'a', 'b', pFoo.id]), mkChan('id0', 'id5', 'a', 'b', pFoo.id))
})
testPE("engine:newChannel, engine:listChannels, engine:removeChannel", function * (t, pe){
var tstErr = _.partial(testError, t);
testPE('engine:installRuleset, engine:listInstalledRIDs, engine:uninstallRuleset', async function (t, pe) {
var tstErr = _.partial(testError, t)
var newPolicy = function(policy){
return runAction(pe, {}, "engine", "newPolicy", [policy]);
};
var newChannel = function(ctx, args){
return runAction(pe, ctx, "engine", "newChannel", args);
};
var removeChannel = function(ctx, args){
return runAction(pe, ctx, "engine", "removeChannel", args);
};
var listChannels = yield pe.modules.get({}, "engine", "listChannels");
var installRS = function (ctx, args) {
return runAction(pe, ctx, 'engine', 'installRuleset', args)
}
var uninstallRID = function (ctx, args) {
return runAction(pe, ctx, 'engine', 'uninstallRuleset', args)
}
var listRIDs = await pe.modules.get({}, 'engine', 'listInstalledRIDs')
var mkChan = function(pico_id, eci, name, type, policy_id){
return {
pico_id: pico_id,
id: eci,
name: name,
type: type,
policy_id: policy_id || ADMIN_POLICY_ID,
sovrin: {
did: eci,
verifyKey: "verifyKey_" + eci,
},
};
};
t.deepEquals(await listRIDs({pico_id: 'id0'}, []), [
'io.picolabs.engine'
])
t.deepEquals(yield listChannels({}, ["id0"]), [
mkChan("id0", "id1", "admin", "secret"),
]);
t.equals(await installRS({}, ['id0', 'io.picolabs.hello_world']), 'io.picolabs.hello_world')
await tstErr(
installRS({}, [NaN]),
'Error: engine:installRuleset needs either a rid string or array, or a url string',
'no rid or url is given'
)
await tstErr(
installRS({}, ['id0', NaN, 0]),
'TypeError: engine:installRuleset was given null instead of a rid string or array',
'wrong rid type'
)
await tstErr(
installRS({}, ['id0', [[]]]),
'TypeError: engine:installRuleset was given a rid array containing a non-string ([Array])',
'rid array has a non-string'
)
await tstErr(
installRS({'pico_id': 'id0'}, {'url': {}}),
'TypeError: engine:installRuleset was given [Map] instead of a url string',
'wrong url type'
)
t.deepEquals(await listRIDs({pico_id: 'id0'}, []), [
'io.picolabs.engine',
'io.picolabs.hello_world'
])
t.deepEquals(yield newChannel({}, ["id0", "a", "b"]), mkChan("id0", "id2", "a", "b"));
t.deepEquals(yield listChannels({}, ["id0"]), [
mkChan("id0", "id1", "admin", "secret"),
mkChan("id0", "id2", "a", "b"),
]);
t.equals(await uninstallRID({}, ['id0', 'io.picolabs.engine']), void 0)
await tstErr(
uninstallRID({}, []),
'Error: engine:uninstallRuleset needs a rid string or array',
'no rid is given'
)
await tstErr(
uninstallRID({}, ['id0', void 0]),
'TypeError: engine:uninstallRuleset was given null instead of a rid string or array',
'wrong rid type'
)
await tstErr(
uninstallRID({}, ['id0', ['null', null]]),
'TypeError: engine:uninstallRuleset was given a rid array containing a non-string (null)',
'rid array has a non-string'
)
t.deepEquals(await listRIDs({pico_id: 'id0'}, []), [
'io.picolabs.hello_world'
])
yield tstErr(
newChannel({}, ["id1"]),
"Error: engine:newChannel needs a name string",
"no name is given"
);
yield tstErr(
newChannel({}, ["id1", "id1"]),
"Error: engine:newChannel needs a type string",
"no type is given"
);
// fallback on ctx.pico_id
t.equals(await uninstallRID({pico_id: 'id0'}, {rid: 'io.picolabs.hello_world'}), void 0)
strictDeepEquals(t, await listRIDs({pico_id: 'id0'}, []), [])
t.equals(await installRS({pico_id: 'id0'}, {rid: 'io.picolabs.hello_world'}), 'io.picolabs.hello_world')
yield tstErr(
removeChannel({}, ["id1"]),
"Error: Cannot delete the pico's admin channel",
"removeChannel shouldn't remove the admin channel"
);
yield tstErr(
removeChannel({}, []),
"Error: engine:removeChannel needs an eci string",
"no eci is given"
);
yield tstErr(
removeChannel({}, [/id1/]),
"TypeError: engine:removeChannel was given re#id1# instead of an eci string",
"wrong eci type"
);
t.equals(yield removeChannel({}, ["eci404"]), false);
// report error on invalid pico_id
var assertInvalidPicoID = function (genfn, id, expected) {
return tstErr(genfn({pico_id: id}, {rid: 'io.picolabs.hello_world'}), expected)
}
t.equals(yield removeChannel({}, ["id2"]), true);
t.equals(yield removeChannel({}, ["id2"]), false);
t.deepEquals(yield listChannels({}, ["id0"]), [
mkChan("id0", "id1", "admin", "secret"),
]);
await assertInvalidPicoID(listRIDs, void 0, 'TypeError: engine:listInstalledRIDs was given null instead of a pico_id string')
//fallback on ctx.pico_id
t.deepEquals(yield listChannels({pico_id: "id0"}, []), [
mkChan("id0", "id1", "admin", "secret"),
]);
t.deepEquals(yield newChannel({pico_id: "id0"}, {"name": "a", "type": "b"}), mkChan("id0", "id3", "a", "b"));
await assertInvalidPicoID(installRS, 'id404', 'NotFoundError: Pico not found: id404')
await assertInvalidPicoID(uninstallRID, 'id404', 'NotFoundError: Pico not found: id404')
t.deepEquals(await listRIDs({pico_id: 'id404'}, []), void 0)
})
//report error on invalid pico_id
var assertInvalidPicoID = function(genfn, id, expected){
return tstErr(genfn({pico_id: id}, {"name": "a", "type": "b"}), expected);
};
test('engine:signChannelMessage, engine:verifySignedMessage, engine:encryptChannelMessage, engine:decryptChannelMessage', function (t) {
(async function () {
var pe = await (util.promisify(mkTestPicoEngine)({
rootRIDs: ['io.picolabs.engine'],
__dont_use_sequential_ids_for_testing: true
}))
var getPicoIDByECI = await pe.modules.get({}, 'engine', 'getPicoIDByECI')
var newChannel = await pe.modules.get({}, 'engine', 'newChannel')
var signChannelMessage = await pe.modules.get({}, 'engine', 'signChannelMessage')
var verifySignedMessage = await pe.modules.get({}, 'engine', 'verifySignedMessage')
var encryptChannelMessage = await pe.modules.get({}, 'engine', 'encryptChannelMessage')
var decryptChannelMessage = await pe.modules.get({}, 'engine', 'decryptChannelMessage')
var sign = function (eci, message) {
return signChannelMessage({}, [eci, message])
}
var verify = function (verifyKey, message) {
return verifySignedMessage({}, [verifyKey, message])
}
var encrypt = function (eci, message, otherPublicKey) {
return encryptChannelMessage({}, [eci, message, otherPublicKey])
}
var decrypt = function (eci, encryptedMessage, nonce, otherPublicKey) {
return decryptChannelMessage({}, [eci, encryptedMessage, nonce, otherPublicKey])
}
yield assertInvalidPicoID(newChannel , void 0, "TypeError: engine:newChannel was given null instead of a pico_id string");
yield assertInvalidPicoID(listChannels, void 0, "TypeError: engine:listChannels was given null instead of a pico_id string");
var eci = await util.promisify(pe.getRootECI)()
var picoId = await getPicoIDByECI({}, [eci])
yield assertInvalidPicoID(newChannel , "id404", "NotFoundError: Pico not found: id404");
t.deepEquals(yield listChannels({}, ["id404"]), void 0);
var chan0 = await newChannel({}, [picoId, 'one', 'one'])
var eci0 = chan0[0].id
var vkey0 = chan0[0].sovrin.verifyKey
var publicKey0 = chan0[0].sovrin.encryptionPublicKey
var chan1 = await newChannel({}, [picoId, 'two', 'two'])
var eci1 = chan1[0].id
var vkey1 = chan1[0].sovrin.verifyKey
var publicKey1 = chan1[0].sovrin.encryptionPublicKey
//setting policy_id on a newChannel
tstErr(newChannel({}, ["id0", "a", "b", 100]), "TypeError: engine:newChannel argument `policy_id` should be String but was Number");
tstErr(newChannel({}, ["id0", "a", "b", "id404"]), "NotFoundError: Policy not found: id404");
var msg = 'some long message! could be json {"hi":1}'
var signed0 = await sign(eci0, msg)
var signed1 = await sign(eci1, msg)
t.ok(_.isString(signed0))
t.ok(_.isString(signed1))
t.notEquals(signed0, signed1)
var pFoo = yield newPolicy({name: "foo"});
t.deepEquals(yield newChannel({}, ["id0", "a", "b", pFoo.id]), mkChan("id0", "id5", "a", "b", pFoo.id));
});
t.equals(await verify(vkey0, signed0), msg)
t.equals(await verify(vkey1, signed1), msg)
t.equals(await verify(vkey1, signed0), false, 'wrong vkey')
t.equals(await verify(vkey0, signed1), false, 'wrong vkey')
testPE("engine:installRuleset, engine:listInstalledRIDs, engine:uninstallRuleset", function * (t, pe){
var tstErr = _.partial(testError, t);
t.equals(await verify('hi', signed1), false, 'rubbish vkey')
t.equals(await verify(vkey0, 'notbs58:%=+!'), false, 'not bs58 message')
var installRS = function(ctx, args){
return runAction(pe, ctx, "engine", "installRuleset", args);
};
var uninstallRID = function(ctx, args){
return runAction(pe, ctx, "engine", "uninstallRuleset", args);
};
var listRIDs = yield pe.modules.get({}, "engine", "listInstalledRIDs");
var encrypted0 = await encrypt(eci0, msg, publicKey1)
var encrypted1 = await encrypt(eci1, msg, publicKey0)
t.deepEquals(yield listRIDs({pico_id: "id0"}, []), [
"io.picolabs.engine",
]);
t.ok(_.isString(encrypted0.encryptedMessage))
t.ok(_.isString(encrypted0.nonce))
t.ok(_.isString(encrypted1.encryptedMessage))
t.ok(_.isString(encrypted1.nonce))
t.notEquals(encrypted0, encrypted1)
t.equals(yield installRS({}, ["id0", "io.picolabs.hello_world"]), "io.picolabs.hello_world");
yield tstErr(
installRS({}, [NaN]),
"Error: engine:installRuleset needs either a rid string or array, or a url string",
"no rid or url is given"
);
yield tstErr(
installRS({}, ["id0", NaN, 0]),
"TypeError: engine:installRuleset was given null instead of a rid string or array",
"wrong rid type"
);
yield tstErr(
installRS({}, ["id0", [[]]]),
"TypeError: engine:installRuleset was given a rid array containing a non-string ([Array])",
"rid array has a non-string"
);
yield tstErr(
installRS({"pico_id": "id0"}, {"url": {}}),
"TypeError: engine:installRuleset was given [Map] instead of a url string",
"wrong url type"
);
t.deepEquals(yield listRIDs({pico_id: "id0"}, []), [
"io.picolabs.engine",
"io.picolabs.hello_world",
]);
var nonce = encrypted0.nonce
var encryptedMessage = encrypted0.encryptedMessage
t.equals(yield uninstallRID({}, ["id0", "io.picolabs.engine"]), void 0);
yield tstErr(
uninstallRID({}, []),
"Error: engine:uninstallRuleset needs a rid string or array",
"no rid is given"
);
yield tstErr(
uninstallRID({}, ["id0", void 0]),
"TypeError: engine:uninstallRuleset was given null instead of a rid string or array",
"wrong rid type"
);
yield tstErr(
uninstallRID({}, ["id0", ["null", null]]),
"TypeError: engine:uninstallRuleset was given a rid array containing a non-string (null)",
"rid array has a non-string"
);
t.deepEquals(yield listRIDs({pico_id: "id0"}, []), [
"io.picolabs.hello_world",
]);
t.equals(await decrypt(eci1, encryptedMessage, nonce, publicKey0), msg, 'message decrypted correctly')
//fallback on ctx.pico_id
t.equals(yield uninstallRID({pico_id: "id0"}, {rid: "io.picolabs.hello_world"}), void 0);
strictDeepEquals(t, yield listRIDs({pico_id: "id0"}, []), []);
t.equals(yield installRS({pico_id: "id0"}, {rid: "io.picolabs.hello_world"}), "io.picolabs.hello_world");
//report error on invalid pico_id
var assertInvalidPicoID = function(genfn, id, expected){
return tstErr(genfn({pico_id: id}, {rid: "io.picolabs.hello_world"}), expected);
};
yield assertInvalidPicoID(listRIDs , void 0, "TypeError: engine:listInstalledRIDs was given null instead of a pico_id string");
yield assertInvalidPicoID(installRS , "id404", "NotFoundError: Pico not found: id404");
yield assertInvalidPicoID(uninstallRID, "id404", "NotFoundError: Pico not found: id404");
t.deepEquals(yield listRIDs({pico_id: "id404"}, []), void 0);
});
test("engine:signChannelMessage, engine:verifySignedMessage, engine:encryptChannelMessage, engine:decryptChannelMessage", function(t){
cocb.run(function*(){
var pe = yield (cocb.wrap(mkTestPicoEngine)({
rootRIDs: ["io.picolabs.engine"],
__dont_use_sequential_ids_for_testing: true,
}));
var getPicoIDByECI = yield pe.modules.get({}, "engine", "getPicoIDByECI");
var newChannel = yield pe.modules.get({}, "engine", "newChannel");
var signChannelMessage = yield pe.modules.get({}, "engine", "signChannelMessage");
var verifySignedMessage = yield pe.modules.get({}, "engine", "verifySignedMessage");
var encryptChannelMessage = yield pe.modules.get({}, "engine", "encryptChannelMessage");
var decryptChannelMessage = yield pe.modules.get({}, "engine", "decryptChannelMessage");
var sign = function(eci, message){
return signChannelMessage({}, [eci, message]);
};
var verify = function(verifyKey, message){
return verifySignedMessage({}, [verifyKey, message]);
};
var encrypt = function(eci, message, otherPublicKey){
return encryptChannelMessage({}, [eci, message, otherPublicKey]);
};
var decrypt = function(eci, encryptedMessage, nonce, otherPublicKey){
return decryptChannelMessage({}, [eci, encryptedMessage, nonce, otherPublicKey]);
};
var eci = yield cocb.wrap(pe.getRootECI)();
var pico_id = yield getPicoIDByECI({}, [eci]);
var chan0 = yield newChannel({}, [pico_id, "one", "one"]);
var eci0 = chan0[0].id;
var vkey0 = chan0[0].sovrin.verifyKey;
var publicKey0 = chan0[0].sovrin.encryptionPublicKey;
var chan1 = yield newChannel({}, [pico_id, "two", "two"]);
var eci1 = chan1[0].id;
var vkey1 = chan1[0].sovrin.verifyKey;
var publicKey1 = chan1[0].sovrin.encryptionPublicKey;
var msg = "some long message! could be json {\"hi\":1}";
var signed0 = yield sign(eci0, msg);
var signed1 = yield sign(eci1, msg);
t.ok(_.isString(signed0));
t.ok(_.isString(signed1));
t.notEquals(signed0, signed1);
t.equals(yield verify(vkey0, signed0), msg);
t.equals(yield verify(vkey1, signed1), msg);
t.equals(yield verify(vkey1, signed0), false, "wrong vkey");
t.equals(yield verify(vkey0, signed1), false, "wrong vkey");
t.equals(yield verify("hi", signed1), false, "rubbish vkey");
t.equals(yield verify(vkey0, "notbs58:%=+!"), false, "not bs58 message");
var encrypted0 = yield encrypt(eci0, msg, publicKey1);
var encrypted1 = yield encrypt(eci1, msg, publicKey0);
t.ok(_.isString(encrypted0.encryptedMessage));
t.ok(_.isString(encrypted0.nonce));
t.ok(_.isString(encrypted1.encryptedMessage));
t.ok(_.isString(encrypted1.nonce));
t.notEquals(encrypted0, encrypted1);
var nonce = encrypted0.nonce;
var encryptedMessage = encrypted0.encryptedMessage;
t.equals(yield decrypt(eci1, encryptedMessage, nonce, publicKey0), msg, "message decrypted correctly");
t.equals(yield decrypt(eci1, encryptedMessage, "bad nonce", publicKey0), false, "bad nonce");
t.equals(yield decrypt(eci1, encryptedMessage, nonce, "Bad public key"), false, "bad key");
t.equals(yield decrypt(eci1, "bogus43212(*(****", nonce, publicKey0), false, "non bs58 message");
}, t.end);
});
t.equals(await decrypt(eci1, encryptedMessage, 'bad nonce', publicKey0), false, 'bad nonce')
t.equals(await decrypt(eci1, encryptedMessage, nonce, 'Bad public key'), false, 'bad key')
t.equals(await decrypt(eci1, 'bogus43212(*(****', nonce, publicKey0), false, 'non bs58 message')
}()).then(t.end).catch(t.end)
})

@@ -1,13 +0,13 @@

module.exports = function(core){
return {
get: function(ctx, id, callback){
core.db.getEntVar(ctx.pico_id, ctx.rid, id.var_name, id.query, callback);
},
set: function(ctx, id, value, callback){
core.db.putEntVar(ctx.pico_id, ctx.rid, id.var_name, id.query, value, callback);
},
del: function(ctx, id, callback){
core.db.delEntVar(ctx.pico_id, ctx.rid, id.var_name, id.query, callback);
},
};
};
module.exports = function (core) {
return {
get: function (ctx, id, callback) {
core.db.getEntVar(ctx.pico_id, ctx.rid, id.var_name, id.query, callback)
},
set: function (ctx, id, value, callback) {
core.db.putEntVar(ctx.pico_id, ctx.rid, id.var_name, id.query, value, callback)
},
del: function (ctx, id, callback) {
core.db.delEntVar(ctx.pico_id, ctx.rid, id.var_name, id.query, callback)
}
}
}

@@ -1,63 +0,66 @@

var _ = require("lodash");
var ktypes = require("krl-stdlib/types");
var mkKRLfn = require("../mkKRLfn");
var mkKRLaction = require("../mkKRLaction");
var request = require("request");
var cleanEvent = require("../cleanEvent");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
var mkKRLfn = require('../mkKRLfn')
var mkKRLaction = require('../mkKRLaction')
var request = require('request')
var cleanEvent = require('../cleanEvent')
module.exports = function(core){
var fns = {
attr: mkKRLfn([
"name",
], function(ctx, args, callback){
callback(null, _.get(ctx, ["event", "attrs", args.name], null));
}),
send: mkKRLaction([
"event",
"host",
], function(ctx, args, callback){
var event;
try{
//validate + normalize event, and make sure is not mutated
event = cleanEvent(args.event);
}catch(err){
return callback(err);
}
if(args.host){
var url = args.host;
url += "/sky/event";
url += "/" + event.eci;
url += "/" + event.eid;
url += "/" + event.domain;
url += "/" + event.type;
request({
method: "POST",
url: url,
headers: {"content-type": "application/json"},
body: ktypes.encode(event.attrs),
}, function(err, res, body){
//ignore it
});
callback();
return;
}
core.signalEvent(event);
callback();
}),
};
return {
def: fns,
get: function(ctx, id, callback){
if(id === "eid"){
callback(null, _.get(ctx, ["event", "eid"]));
return;
}else if(id === "attrs"){
//the user may mutate their copy
var attrs = _.cloneDeep(ctx.event.attrs);
callback(null, attrs);
return;
}
callback(new Error("Not defined `event:" + id + "`"));
},
};
};
module.exports = function (core) {
var fns = {
attr: mkKRLfn([
'name'
], function (ctx, args, callback) {
callback(null, _.get(ctx, ['event', 'attrs', args.name], null))
}),
send: mkKRLaction([
'event',
'host'
], function (ctx, args, callback) {
var event
try {
// validate + normalize event, and make sure is not mutated
event = cleanEvent(args.event)
} catch (err) {
return callback(err)
}
if (args.host) {
var url = args.host
url += '/sky/event'
url += '/' + event.eci
url += '/' + event.eid
url += '/' + event.domain
url += '/' + event.type
request({
method: 'POST',
url: url,
headers: {'content-type': 'application/json'},
body: ktypes.encode(event.attrs)
}, function (err, res, body) {
if (err) {
ctx.log('error', err + '')// TODO better handling
}
// ignore
})
callback()
return
}
core.signalEvent(event)
callback()
})
}
return {
def: fns,
get: function (ctx, id, callback) {
if (id === 'eid') {
callback(null, _.get(ctx, ['event', 'eid']))
return
} else if (id === 'attrs') {
// the user may mutate their copy
var attrs = _.cloneDeep(ctx.event.attrs)
callback(null, attrs)
return
}
callback(new Error('Not defined `event:' + id + '`'))
}
}
}

@@ -1,71 +0,62 @@

//var _ = require("lodash");
var test = require("tape");
var http = require("http");
var cocb = require("co-callback");
var event_module = require("./event");
// var _ = require("lodash");
var test = require('tape')
var http = require('http')
var eventModule = require('./event')
test("module - event:attr(name)", function(t){
cocb.run(function*(){
var kevent = event_module();
test('module - event:attr(name)', function (t) {
(async function () {
var kevent = eventModule()
t.equals(
yield kevent.def.attr({event: {attrs: {foo: "bar"}}}, ["foo"]),
"bar"
);
t.equals(
await kevent.def.attr({event: {attrs: {foo: 'bar'}}}, ['foo']),
'bar'
)
//just null if no ctx.event, or it doesn't match
t.equals(yield kevent.def.attr({}, ["baz"]), null);
t.equals(
yield kevent.def.attr({event: {attrs: {foo: "bar"}}}, ["baz"]),
null
);
// just null if no ctx.event, or it doesn't match
t.equals(await kevent.def.attr({}, ['baz']), null)
t.equals(
await kevent.def.attr({event: {attrs: {foo: 'bar'}}}, ['baz']),
null
)
}()).then(t.end).catch(t.end)
})
}, function(err){
t.end(err);
});
});
test('module - event:send(event, host = null)', function (t) {
var serverReached = false
var server = http.createServer(function (req, res) {
serverReached = true
test("module - event:send(event, host = null)", function(t){
var server_reached = false;
var server = http.createServer(function(req, res){
server_reached = true;
var body = ''
req.on('data', function (buffer) {
body += buffer.toString()
})
req.on('end', function () {
t.equals(req.url, '/sky/event/some-eci/none/some-d/some-t')
t.equals(body, '{"foo":{},"bar":[],"baz":{"q":"[Function]"}}')
var body = "";
req.on("data", function(buffer){
body += buffer.toString();
});
req.on("end", function(){
t.equals(req.url, "/sky/event/some-eci/none/some-d/some-t");
t.equals(body, "{\"foo\":{},\"bar\":[],\"baz\":{\"q\":\"[Function]\"}}");
res.end()
server.close()
t.end()
})
})
server.listen(0, function () {
var host = 'http://localhost:' + server.address().port;
(async function () {
var kevent = eventModule()
res.end();
server.close();
t.end();
});
});
server.listen(0, function(){
var host = "http://localhost:" + server.address().port;
cocb.run(function*(){
var kevent = event_module();
t.equals(
(yield kevent.def.send({}, {
event: {
eci: "some-eci",
domain: "some-d",
type: "some-t",
attrs: {foo: {}, bar: [], baz: {"q": function(){}}},
},
host: host,
}))[0],
void 0//returns nothing
);
t.equals(server_reached, false, "should be async, i.e. server not reached yet");
}, function(err){
if(err){
t.end(err);
}
});
});
});
t.equals(
(await kevent.def.send({}, {
event: {
eci: 'some-eci',
domain: 'some-d',
type: 'some-t',
attrs: {foo: {}, bar: [], baz: {'q': function () {}}}
},
host: host
}))[0],
void 0// returns nothing
)
t.equals(serverReached, false, 'should be async, i.e. server not reached yet')
}()).catch(t.end)
})
})

@@ -1,108 +0,108 @@

var _ = require("lodash");
var ktypes = require("krl-stdlib/types");
var request = require("request");
var mkKRLfn = require("../mkKRLfn");
var mkKRLaction = require("../mkKRLaction");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
var request = require('request')
var mkKRLfn = require('../mkKRLfn')
var mkKRLaction = require('../mkKRLaction')
//return arg when it's a KRL map, otherwise fall back to defaultTo
var ensureMap = function(arg, defaultTo){
    if(ktypes.isMap(arg)){
        return arg;
    }
    return defaultTo;
};
// Return arg when it's a KRL map, otherwise fall back to defaultTo.
var ensureMap = function (arg, defaultTo) {
  if (!ktypes.isMap(arg)) {
    return defaultTo
  }
  return arg
}
var mkMethod = function(method, isAction){
var mk = isAction ? mkKRLaction : mkKRLfn;
return mk([
//NOTE: order is significant so it's a breaking API change to change argument ordering
"url",
"qs",
"headers",
"body",
"auth",
"json",
"form",
"parseJSON",
"autoraise",
], function(ctx, args, callback){
if(!_.has(args, "url")){
return callback(new Error("http:" + method.toLowerCase() + " needs a url string"));
}
if(!ktypes.isString(args.url)){
return callback(new TypeError("http:" + method.toLowerCase() + " was given " + ktypes.toString(args.url) + " instead of a url string"));
}
var mkMethod = function (method, isAction) {
var mk = isAction ? mkKRLaction : mkKRLfn
return mk([
// NOTE: order is significant so it's a breaking API change to change argument ordering
'url',
'qs',
'headers',
'body',
'auth',
'json',
'form',
'parseJSON',
'autoraise'
], function (ctx, args, callback) {
if (!_.has(args, 'url')) {
return callback(new Error('http:' + method.toLowerCase() + ' needs a url string'))
}
if (!ktypes.isString(args.url)) {
return callback(new TypeError('http:' + method.toLowerCase() + ' was given ' + ktypes.toString(args.url) + ' instead of a url string'))
}
var opts = {
method: method,
url: args.url,
qs: ensureMap(args.qs, {}),
headers: ensureMap(args.headers, {}),
auth: ensureMap(args.auth),
};
var opts = {
method: method,
url: args.url,
qs: ensureMap(args.qs, {}),
headers: ensureMap(args.headers, {}),
auth: ensureMap(args.auth)
}
if(_.has(args, "body")){
opts.body = ktypes.toString(args.body);
}else if(_.has(args, "json")){
opts.body = ktypes.encode(args.json);
if(!_.has(opts.headers, "content-type")){
opts.headers["content-type"] = "application/json";
}
}else if(_.has(args, "form")){
opts.form = ensureMap(args.form);
if (_.has(args, 'body')) {
opts.body = ktypes.toString(args.body)
} else if (_.has(args, 'json')) {
opts.body = ktypes.encode(args.json)
if (!_.has(opts.headers, 'content-type')) {
opts.headers['content-type'] = 'application/json'
}
} else if (_.has(args, 'form')) {
opts.form = ensureMap(args.form)
}
request(opts, function (err, res, body) {
if (err) {
callback(err)
return
}
var r = {
content: body,
content_type: res.headers['content-type'],
content_length: _.parseInt(res.headers['content-length'], 0) || 0,
headers: res.headers,
status_code: res.statusCode,
status_line: res.statusMessage
}
if (args.parseJSON === true) {
try {
r.content = JSON.parse(r.content)
} catch (e) {
// just leave the content as is
}
}
if (_.has(args, 'autoraise')) {
r.label = ktypes.toString(args.autoraise)
ctx.raiseEvent({
domain: 'http',
type: method.toLowerCase(),
attributes: r
// for_rid: "",
}).then(function (r) {
callback(null, r)
}, function (err) {
process.nextTick(function () {
// wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
// when in fact we are handling the rejection
callback(err)
})
})
} else {
callback(null, r)
}
})
})
}
request(opts, function(err, res, body){
if(err){
callback(err);
return;
}
var r = {
content: body,
content_type: res.headers["content-type"],
content_length: _.parseInt(res.headers["content-length"], 0) || 0,
headers: res.headers,
status_code: res.statusCode,
status_line: res.statusMessage
};
if(args.parseJSON === true){
try{
r.content = JSON.parse(r.content);
}catch(e){
//just leave the content as is
}
}
if(_.has(args, "autoraise")){
r.label = ktypes.toString(args.autoraise);
ctx.raiseEvent({
domain: "http",
type: method.toLowerCase(),
attributes: r,
//for_rid: "",
}).then(function(r){
callback(null, r);
}, function(err){
process.nextTick(function(){
//wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
//when in fact we are handling the rejection
callback(err);
});
});
}else{
callback(void 0, r);
}
});
});
};
module.exports = function (core) {
return {
def: {
get: mkMethod('GET'),
head: mkMethod('HEAD'),
module.exports = function(core){
return {
def: {
get: mkMethod("GET"),
head: mkMethod("HEAD"),
post: mkMethod("POST", true),
put: mkMethod("PUT", true),
patch: mkMethod("PATCH", true),
"delete": mkMethod("DELETE", true),
},
};
};
post: mkMethod('POST', true),
put: mkMethod('PUT', true),
patch: mkMethod('PATCH', true),
'delete': mkMethod('DELETE', true)
}
}
}

@@ -1,234 +0,236 @@

var _ = require("lodash");
var test = require("tape");
var http = require("http");
var cocb = require("co-callback");
var khttp = require("./http")().def;
var ktypes = require("krl-stdlib/types");
var testErr = require("../testErr");
var _ = require('lodash')
var test = require('tape')
var http = require('http')
var khttp = require('./http')().def
var ktypes = require('krl-stdlib/types')
var testErr = require('../testErr')
test("http module", function(t){
var server = http.createServer(function(req, res){
var body = "";
req.on("data", function(buffer){
body += buffer.toString();
});
req.on("end", function(){
var out;
if(req.url === "/not-json-resp"){
out = "this is not json";
res.writeHead(200, {
"Content-Length": Buffer.byteLength(out),
});
res.end(out);
return;
}
out = JSON.stringify({
url: req.url,
headers: req.headers,
body: body,
}, false, 2);
res.writeHead(200, {
"Content-Type": "application/json",
"Content-Length": Buffer.byteLength(out),
"da-extra-header": "wat?",
});
res.end(out);
});
});
test('http module', function (t) {
var server = http.createServer(function (req, res) {
var body = ''
req.on('data', function (buffer) {
body += buffer.toString()
})
req.on('end', function () {
var out
if (req.url === '/not-json-resp') {
out = 'this is not json'
res.writeHead(200, {
'Content-Length': Buffer.byteLength(out)
})
res.end(out)
return
}
out = JSON.stringify({
url: req.url,
headers: req.headers,
body: body
}, false, 2)
res.writeHead(200, {
'Content-Type': 'application/json',
'Content-Length': Buffer.byteLength(out),
'da-extra-header': 'wat?'
})
res.end(out)
})
})
server.listen(0, function(){
var url = "http://localhost:" + server.address().port;
cocb.run(function*(){
var terr = testErr(t, khttp);
server.listen(0, function () {
var url = 'http://localhost:' + server.address().port;
(async function () {
var terr = testErr(t, khttp)
var resp;
var resp
var doHttp = function*(method, args){
var resp = yield khttp[method]({}, args);
if(ktypes.isAction(khttp[method])){
resp = resp[0];
}
t.ok(_.isNumber(resp.content_length));
t.ok(!_.isNaN(resp.content_length));
delete resp.content_length;//windows can return off by 1 so it breaks tests
delete resp.headers["content-length"];//windows can return off by 1 so it breaks tests
delete resp.headers["date"];
return resp;
};
var doHttp = async function (method, args) {
var resp = await khttp[method]({}, args)
if (ktypes.isAction(khttp[method])) {
resp = resp[0]
}
t.ok(_.isNumber(resp.content_length))
t.ok(!_.isNaN(resp.content_length))
delete resp.content_length// windows can return off by 1 so it breaks tests
delete resp.headers['content-length']// windows can return off by 1 so it breaks tests
delete resp.headers['date']
return resp
}
resp = yield doHttp("get", [url, {a: 1}]);
resp.content = JSON.parse(resp.content);
t.deepEquals(resp, {
content: {
"url": "/?a=1",
"headers": {
"host": "localhost:" + server.address().port,
"connection": "close"
},
body: ""
},
content_type: "application/json",
status_code: 200,
status_line: "OK",
headers: {
"content-type": "application/json",
"connection": "close",
"da-extra-header": "wat?",
}
});
resp = await doHttp('get', [url, {a: 1}])
resp.content = JSON.parse(resp.content)
t.deepEquals(resp, {
content: {
'url': '/?a=1',
'headers': {
'host': 'localhost:' + server.address().port,
'connection': 'close'
},
body: ''
},
content_type: 'application/json',
status_code: 200,
status_line: 'OK',
headers: {
'content-type': 'application/json',
'connection': 'close',
'da-extra-header': 'wat?'
}
})
//raw post body
resp = yield doHttp("post", {
url: url,
qs: {"baz": "qux"},
headers: {"some": "header"},
body: "some post data",
json: {"json": "get's overriden by raw body"},
form: {"form": "get's overriden by raw body"},
auth: {
username: "bob",
password: "nopass",
}
});
resp.content = JSON.parse(resp.content);
t.deepEquals(resp, {
content: {
"url": "/?baz=qux",
"headers": {
"some": "header",
"host": "localhost:" + server.address().port,
authorization: "Basic Ym9iOm5vcGFzcw==",
"content-length": "14",
"connection": "close"
},
body: "some post data"
},
content_type: "application/json",
status_code: 200,
status_line: "OK",
headers: {
"content-type": "application/json",
"connection": "close",
"da-extra-header": "wat?",
}
});
// raw post body
resp = await doHttp('post', {
url: url,
qs: {'baz': 'qux'},
headers: {'some': 'header'},
body: 'some post data',
json: {'json': "get's overriden by raw body"},
form: {'form': "get's overriden by raw body"},
auth: {
username: 'bob',
password: 'nopass'
}
})
resp.content = JSON.parse(resp.content)
t.deepEquals(resp, {
content: {
'url': '/?baz=qux',
'headers': {
'some': 'header',
'host': 'localhost:' + server.address().port,
authorization: 'Basic Ym9iOm5vcGFzcw==',
'content-length': '14',
'connection': 'close'
},
body: 'some post data'
},
content_type: 'application/json',
status_code: 200,
status_line: 'OK',
headers: {
'content-type': 'application/json',
'connection': 'close',
'da-extra-header': 'wat?'
}
})
//form body
resp = yield doHttp("post", {
url: url,
qs: {"baz": "qux"},
headers: {"some": "header"},
form: {formkey: "formval", foo: ["bar", "baz"]},
});
resp.content = JSON.parse(resp.content);
t.deepEquals(resp, {
content: {
"url": "/?baz=qux",
"headers": {
"some": "header",
"host": "localhost:" + server.address().port,
"content-type": "application/x-www-form-urlencoded",
"content-length": "45",
"connection": "close"
},
body: "formkey=formval&foo%5B0%5D=bar&foo%5B1%5D=baz"
},
content_type: "application/json",
status_code: 200,
status_line: "OK",
headers: {
"content-type": "application/json",
"connection": "close",
"da-extra-header": "wat?",
}
});
// form body
resp = await doHttp('post', {
url: url,
qs: {'baz': 'qux'},
headers: {'some': 'header'},
form: {formkey: 'formval', foo: ['bar', 'baz']}
})
resp.content = JSON.parse(resp.content)
t.deepEquals(resp, {
content: {
'url': '/?baz=qux',
'headers': {
'some': 'header',
'host': 'localhost:' + server.address().port,
'content-type': 'application/x-www-form-urlencoded',
'content-length': '45',
'connection': 'close'
},
body: 'formkey=formval&foo%5B0%5D=bar&foo%5B1%5D=baz'
},
content_type: 'application/json',
status_code: 200,
status_line: 'OK',
headers: {
'content-type': 'application/json',
'connection': 'close',
'da-extra-header': 'wat?'
}
})
// json body
resp = await doHttp('post', {
url: url,
qs: {'baz': 'qux'},
headers: {'some': 'header'},
json: {formkey: 'formval', foo: ['bar', 'baz']}
})
resp.content = JSON.parse(resp.content)
t.deepEquals(resp, {
content: {
'url': '/?baz=qux',
'headers': {
'some': 'header',
'host': 'localhost:' + server.address().port,
'content-type': 'application/json',
'content-length': '41',
'connection': 'close'
},
body: '{"formkey":"formval","foo":["bar","baz"]}'
},
content_type: 'application/json',
status_code: 200,
status_line: 'OK',
headers: {
'content-type': 'application/json',
'connection': 'close',
'da-extra-header': 'wat?'
}
})
//json body
resp = yield doHttp("post", {
url: url,
qs: {"baz": "qux"},
headers: {"some": "header"},
json: {formkey: "formval", foo: ["bar", "baz"]}
});
resp.content = JSON.parse(resp.content);
t.deepEquals(resp, {
content: {
"url": "/?baz=qux",
"headers": {
"some": "header",
"host": "localhost:" + server.address().port,
"content-type": "application/json",
"content-length": "41",
"connection": "close"
},
body: "{\"formkey\":\"formval\",\"foo\":[\"bar\",\"baz\"]}"
},
content_type: "application/json",
status_code: 200,
status_line: "OK",
headers: {
"content-type": "application/json",
"connection": "close",
"da-extra-header": "wat?",
}
});
// parseJSON
resp = await doHttp('post', {
url: url,
parseJSON: true
})
t.deepEquals(resp, {
content: {
'url': '/',
'headers': {
'host': 'localhost:' + server.address().port,
'content-length': '0',
'connection': 'close'
},
body: ''
},
content_type: 'application/json',
status_code: 200,
status_line: 'OK',
headers: {
'content-type': 'application/json',
'connection': 'close',
'da-extra-header': 'wat?'
}
})
// parseJSON when not actually a json response
resp = await doHttp('post', {
url: url + '/not-json-resp',
parseJSON: true
})
t.deepEquals(resp, {
content: 'this is not json',
content_type: void 0,
status_code: 200,
status_line: 'OK',
headers: {
'connection': 'close'
}
})
//parseJSON
resp = yield doHttp("post", {
url: url,
parseJSON: true,
});
t.deepEquals(resp, {
content: {
"url": "/",
"headers": {
"host": "localhost:" + server.address().port,
"content-length": "0",
"connection": "close"
},
body: ""
},
content_type: "application/json",
status_code: 200,
status_line: "OK",
headers: {
"content-type": "application/json",
"connection": "close",
"da-extra-header": "wat?",
}
});
var methods = _.keys(khttp)
var numMethods = _.size(methods)
var errArg = {parseJSON: true}
var typeErrArg = {url: NaN}
//parseJSON when not actually a json response
resp = yield doHttp("post", {
url: url + "/not-json-resp",
parseJSON: true,
});
t.deepEquals(resp, {
content: "this is not json",
content_type: void 0,
status_code: 200,
status_line: "OK",
headers: {
"connection": "close",
}
});
var methods = _.keys(khttp);
var numMethods = _.size(methods);
var errArg = {parseJSON: true};
var typeErrArg = {url: NaN};
var i;
for(i=0; i < numMethods; i++){
var msgSubstring = "Error: http:" + methods[i] + " ";
yield terr(methods[i], {}, errArg, msgSubstring + "needs a url string");
yield terr(methods[i], {}, typeErrArg, "Type" + msgSubstring + "was given null instead of a url string");
}
}, function(err){
server.close();
t.end(err);
});
});
});
var i
for (i = 0; i < numMethods; i++) {
var msgSubstring = 'Error: http:' + methods[i] + ' '
await terr(methods[i], {}, errArg, msgSubstring + 'needs a url string')
await terr(methods[i], {}, typeErrArg, 'Type' + msgSubstring + 'was given null instead of a url string')
}
}())
.then(function () {
server.close()
t.end()
})
.catch(function (err) {
server.close()
t.end(err)
})
})
})

@@ -1,140 +0,136 @@

var _ = require("lodash");
var cocb = require("co-callback");
var ktypes = require("krl-stdlib/types");
var mkKRLfn = require("../mkKRLfn");
var mkKRLaction = require("../mkKRLaction");
var _ = require('lodash')
var util = require('util')
var ktypes = require('krl-stdlib/types')
var mkKRLfn = require('../mkKRLfn')
var mkKRLaction = require('../mkKRLaction')
var sub_modules = {
ent: require("./ent"),
app: require("./app"),
event: require("./event"),
engine: require("./engine"),
http: require("./http"),
keys: require("./keys"),
math: require("./math"),
meta: require("./meta"),
schedule: require("./schedule"),
time: require("./time"),
random: require("./random"),
};
var subModules = {
ent: require('./ent'),
app: require('./app'),
event: require('./event'),
engine: require('./engine'),
http: require('./http'),
keys: require('./keys'),
math: require('./math'),
meta: require('./meta'),
schedule: require('./schedule'),
time: require('./time'),
random: require('./random')
}
// Normalize a module-variable id. Only ent/app ids carry structure
// ({var_name, query}); ids for every other domain are plain strings.
var normalizeId = function (domain, id) {
  var isVarDomain = domain === 'ent' || domain === 'app'
  if (!isVarDomain) {
    return ktypes.toString(id)
  }
  // {key, path} form selects a nested path inside the variable
  var hasKey = _.has(id, 'key') && ktypes.isString(id.key)
  if (hasKey) {
    return {var_name: id.key, query: id.path}
  }
  return {var_name: ktypes.toString(id)}
}
var normalizeId = function(domain, id){
if(domain !== "ent" && domain !== "app"){
return ktypes.toString(id);
module.exports = function (core, thirdPartyModules) {
var modules = _.mapValues(subModules, function (subModule) {
var m = subModule(core)
if (m.get) {
m.get = util.promisify(m.get)
}
if(_.has(id, "key") && ktypes.isString(id.key)){
return {
var_name: id.key,
query: id.path,
};
if (m.set) {
m.set = util.promisify(m.set)
}
return {var_name: ktypes.toString(id)};
};
if (m.del) {
m.del = util.promisify(m.del)
}
return m
})
_.each(thirdPartyModules, function (ops, domain) {
if (_.has(modules, domain)) {
throw new Error('You cannot override the built-in `' + domain + ':*` module')
}
modules[domain] = {
def: {}
}
_.each(ops, function (op, id) {
var mkErr = function (msg) {
return new Error('Custom module ' + domain + ':' + id + ' ' + msg)
}
if (!op ||
!_.isArray(op.args) ||
!_.every(op.args, _.isString) ||
_.size(op.args) !== _.size(_.uniq(op.args))
) {
throw mkErr('`args` must be a unique array of strings')
}
if (!_.isFunction(op.fn)) {
throw mkErr('`fn` must be `function(args, callback){...}`')
}
module.exports = function(core, third_party_modules){
var fn = function (ctx, args, callback) {
op.fn(args, callback)
}
var modules = _.mapValues(sub_modules, function(m){
return m(core);
});
if (op.type === 'function') {
modules[domain].def[id] = mkKRLfn(op.args, fn)
} else if (op.type === 'action') {
modules[domain].def[id] = mkKRLaction(op.args, fn)
} else {
throw mkErr('`type` must be "action" or "function"')
}
})
})
_.each(third_party_modules, function(ops, domain){
if(_.has(modules, domain)){
throw new Error("You cannot override the built-in `" + domain + ":*` module");
}
modules[domain] = {
def: {},
};
_.each(ops, function(op, id){
var mkErr = function(msg){
return new Error("Custom module " + domain + ":" + id + " " + msg);
};
if(!op
|| !_.isArray(op.args)
|| !_.every(op.args, _.isString)
|| _.size(op.args) !== _.size(_.uniq(op.args))
){
throw mkErr("`args` must be a unique array of strings");
}
if(!_.isFunction(op.fn)){
throw mkErr("`fn` must be `function(args, callback){...}`");
}
var userModuleLookup = function (ctx, domain, id) {
var umod = _.get(core.rsreg.get(ctx.rid), ['modules_used', domain])
var hasIt = _.has(umod, 'scope') &&
umod.scope.has(id) &&
_.includes(umod.provides, id)
var fn = function(ctx, args, callback){
op.fn(args, callback);
};
if(op.type === "function"){
modules[domain].def[id] = mkKRLfn(op.args, fn);
}else if(op.type === "action"){
modules[domain].def[id] = mkKRLaction(op.args, fn);
}else{
throw mkErr("`type` must be \"action\" or \"function\"");
}
});
});
var userModuleLookup = function(ctx, domain, id){
var umod = _.get(core.rsreg.get(ctx.rid), ["modules_used", domain]);
var has_it = _.has(umod, "scope")
&& umod.scope.has(id)
&& _.includes(umod.provides, id)
;
var value = has_it ? umod.scope.get(id) : void 0;
return {
has_it: has_it,
value: value,
};
};
var value = hasIt ? umod.scope.get(id) : void 0
return {
get: cocb.wrap(function(ctx, domain, id, callback){
id = normalizeId(domain, id);
var umod = userModuleLookup(ctx, domain, id);
if(umod.has_it){
callback(null, umod.value);
return;
}
if(_.has(modules, [domain, "def", id])){
callback(null, modules[domain].def[id]);
return;
}
if(_.has(modules, [domain, "get"])){
modules[domain].get(ctx, id, callback);
return;
}
callback(new Error("Not defined `" + domain + ":" + id + "`"));
}),
hasIt: hasIt,
value: value
}
}
return {
get: function (ctx, domain, id) {
id = normalizeId(domain, id)
var umod = userModuleLookup(ctx, domain, id)
if (umod.hasIt) {
return umod.value
}
if (_.has(modules, [domain, 'def', id])) {
return modules[domain].def[id]
}
if (_.has(modules, [domain, 'get'])) {
return modules[domain].get(ctx, id)
}
throw new Error('Not defined `' + domain + ':' + id + '`')
},
set: cocb.wrap(function(ctx, domain, id, value, callback){
id = normalizeId(domain, id);
if(!_.has(modules, domain)){
callback(new Error("Module not defined `" + domain + ":" + id + "`"));
return;
}
if(!_.has(modules[domain], "set")){
callback(new Error("Cannot assign to `" + domain + ":*`"));
return;
}
modules[domain].set(ctx, id, value, callback);
}),
set: function (ctx, domain, id, value) {
id = normalizeId(domain, id)
if (!_.has(modules, domain)) {
throw new Error('Module not defined `' + domain + ':' + id + '`')
}
if (!_.has(modules[domain], 'set')) {
throw new Error('Cannot assign to `' + domain + ':*`')
}
return modules[domain].set(ctx, id, value)
},
del: cocb.wrap(function(ctx, domain, id, callback){
id = normalizeId(domain, id);
if(!_.has(modules, domain)){
callback(new Error("Module not defined `" + domain + ":" + id + "`"));
return;
}
if(!_.has(modules[domain], "del")){
callback(new Error("Cannot clear/delete to `" + domain + ":*`"));
return;
}
modules[domain].del(ctx, id, callback);
}),
};
};
del: function (ctx, domain, id) {
id = normalizeId(domain, id)
if (!_.has(modules, domain)) {
throw new Error('Module not defined `' + domain + ':' + id + '`')
}
if (!_.has(modules[domain], 'del')) {
throw new Error('Cannot clear/delete to `' + domain + ':*`')
}
return modules[domain].del(ctx, id)
}
}
}

@@ -1,14 +0,14 @@

var ktypes = require("krl-stdlib/types");
var ktypes = require('krl-stdlib/types')
module.exports = function(core){
return {
get: function(ctx, id, callback){
var key = ctx.getMyKey(id);
if(ktypes.isNull(key)){
callback(new Error("keys:" + id + " not defined"));
return;
}
callback(null, key);
}
};
};
module.exports = function (core) {
return {
get: function (ctx, id, callback) {
var key = ctx.getMyKey(id)
if (ktypes.isNull(key)) {
callback(new Error('keys:' + id + ' not defined'))
return
}
callback(null, key)
}
}
}

@@ -1,68 +0,65 @@

var _ = require("lodash");
var crypto = require("crypto");
var ktypes = require("krl-stdlib/types");
var mkKRLfn = require("../mkKRLfn");
var _ = require('lodash')
var crypto = require('crypto')
var ktypes = require('krl-stdlib/types')
var mkKRLfn = require('../mkKRLfn')
var supportedHashFns = crypto.getHashes();
var supportedHashFns = crypto.getHashes()
module.exports = function(core){
return {
def: {
module.exports = function (core) {
return {
def: {
base64encode: mkKRLfn([
"str",
], function(ctx, args, callback){
if(!_.has(args, "str")){
return callback(new Error("math:base64encode needs a str string"));
}
base64encode: mkKRLfn([
'str'
], function (ctx, args, callback) {
if (!_.has(args, 'str')) {
return callback(new Error('math:base64encode needs a str string'))
}
var str = ktypes.toString(args.str);
callback(null, Buffer.from(str, "utf8").toString("base64"));
}),
var str = ktypes.toString(args.str)
callback(null, Buffer.from(str, 'utf8').toString('base64'))
}),
base64decode: mkKRLfn([
'str'
], function (ctx, args, callback) {
if (!_.has(args, 'str')) {
return callback(new Error('math:base64decode needs a str string'))
}
base64decode: mkKRLfn([
"str",
], function(ctx, args, callback){
if(!_.has(args, "str")){
return callback(new Error("math:base64decode needs a str string"));
}
var str = ktypes.toString(args.str)
callback(null, Buffer.from(str, 'base64').toString('utf8'))
}),
var str = ktypes.toString(args.str);
callback(null, Buffer.from(str, "base64").toString("utf8"));
}),
hashFunctions: mkKRLfn([
], function (ctx, args, callback) {
callback(null, supportedHashFns)
}),
hash: mkKRLfn([
'hashFn',
'toHash'
], function (ctx, args, callback) {
if (!_.has(args, 'hashFn')) {
return callback(new Error('math:hash needs a hashFn string'))
}
if (!_.has(args, 'toHash')) {
return callback(new Error('math:hash needs a toHash string'))
}
if (!_.includes(supportedHashFns, args.hashFn)) {
if (ktypes.isString(args.hashFn)) {
callback(new Error("math:hash doesn't recognize the hash algorithm " + args.hashFn))
} else {
callback(new TypeError('math:hash was given ' + ktypes.toString(args.hashFn) + ' instead of a hashFn string'))
}
}
hashFunctions: mkKRLfn([
], function(ctx, args, callback){
callback(null, supportedHashFns);
}),
var str = ktypes.toString(args.toHash)
var hash = crypto.createHash(args.hashFn).update(str)
callback(null, hash.digest('hex'))
})
hash: mkKRLfn([
"hashFn",
"toHash"
], function(ctx, args, callback){
if(!_.has(args, "hashFn")){
return callback(new Error("math:hash needs a hashFn string"));
}
if(!_.has(args, "toHash")){
return callback(new Error("math:hash needs a toHash string"));
}
if(!_.includes(supportedHashFns, args.hashFn)){
if(ktypes.isString(args.hashFn)){
callback(new Error("math:hash doesn't recognize the hash algorithm " + args.hashFn));
}else{
callback(new TypeError("math:hash was given " + ktypes.toString(args.hashFn) + " instead of a hashFn string"));
}
}
var str = ktypes.toString(args.toHash);
var hash = crypto.createHash(args.hashFn).update(str);
callback(null, hash.digest("hex"));
}),
}
};
};
}
}
}

@@ -1,44 +0,42 @@

var test = require("tape");
var cocb = require("co-callback");
var kmath = require("./math")().def;
var test = require('tape')
var kmath = require('./math')().def
var testErr = require("../testErr");
var testErr = require('../testErr')
test("module - math:*", function(t){
cocb.run(function*(){
var terr = testErr(t, kmath);
test('module - math:*', function (t) {
(async function () {
var terr = testErr(t, kmath)
t.equals(yield kmath.base64encode({}, ["}{"]), "fXs=", "base64encode");
t.equals(yield kmath.base64encode({}, [null]), yield kmath.base64encode({}, ["null"]), "base64encode coreces to strings");
t.equals(await kmath.base64encode({}, ['}{']), 'fXs=', 'base64encode')
t.equals(await kmath.base64encode({}, [null]), await kmath.base64encode({}, ['null']), 'base64encode coreces to strings')
yield terr("base64encode", {}, [], "Error: math:base64encode needs a str string");
await terr('base64encode', {}, [], 'Error: math:base64encode needs a str string')
t.equals(yield kmath.base64decode({}, ["fXs="]), "}{", "base64decode");
t.equals(await kmath.base64decode({}, ['fXs=']), '}{', 'base64decode')
yield terr("base64decode", {}, [], "Error: math:base64decode needs a str string");
await terr('base64decode', {}, [], 'Error: math:base64decode needs a str string')
t.ok(yield kmath.hashFunctions({}, []), "hashFunctions should return something");
t.ok(await kmath.hashFunctions({}, []), 'hashFunctions should return something')
t.equals(
yield kmath.hash({}, ["sha256", "hello"]),
"2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824",
"sha256 \"hello\""
);
t.equals(
yield kmath.hash({}, ["sha256", null]),
yield kmath.hash({}, ["sha256", "null"]),
"sha2 coerces inputs to Strings"
);
t.equals(
yield kmath.hash({}, ["sha256", [1, 2]]),
yield kmath.hash({}, ["sha256", "[Array]"]),
"sha2 coerces inputs to Strings"
);
t.equals(
await kmath.hash({}, ['sha256', 'hello']),
'2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824',
'sha256 "hello"'
)
t.equals(
await kmath.hash({}, ['sha256', null]),
await kmath.hash({}, ['sha256', 'null']),
'sha2 coerces inputs to Strings'
)
t.equals(
await kmath.hash({}, ['sha256', [1, 2]]),
await kmath.hash({}, ['sha256', '[Array]']),
'sha2 coerces inputs to Strings'
)
yield terr("hash", {}, [], "Error: math:hash needs a hashFn string");
yield terr("hash", {}, [0], "Error: math:hash needs a toHash string");
yield terr("hash", {}, [0, null], "TypeError: math:hash was given 0 instead of a hashFn string");
yield terr("hash", {}, ["0", null], "Error: math:hash doesn't recognize the hash algorithm 0");
}, t.end);
});
await terr('hash', {}, [], 'Error: math:hash needs a hashFn string')
await terr('hash', {}, [0], 'Error: math:hash needs a toHash string')
await terr('hash', {}, [0, null], 'TypeError: math:hash was given 0 instead of a hashFn string')
await terr('hash', {}, ['0', null], "Error: math:hash doesn't recognize the hash algorithm 0")
}()).then(t.end).catch(t.end)
})

@@ -1,57 +0,57 @@

var _ = require("lodash");
var _ = require('lodash')
var getCoreCTXval = {
"eci": function(core, ctx){
return _.get(ctx, ["event", "eci"], _.get(ctx, ["query", "eci"]));
},
"rid": function(core, ctx){
return ctx.rid;
},
"host": function(core, ctx){
return core.host;
},
"picoId": function(core, ctx){
//currently, this will be undefined durring ruleset registration
return ctx.pico_id;
},
"txnId": function(core, ctx){
return ctx.txn_id;
},
"rulesetName": function(core, ctx){
return _.get(core.rsreg.get(ctx.rid), ["meta", "name"]);
},
"rulesetDescription": function(core, ctx){
return _.get(core.rsreg.get(ctx.rid), ["meta", "description"]);
},
"rulesetAuthor": function(core, ctx){
return _.get(core.rsreg.get(ctx.rid), ["meta", "author"]);
},
"ruleName": function(core, ctx){
return ctx.rule_name;
},
"inEvent": function(core, ctx){
return _.has(ctx, "event");
},
"inQuery": function(core, ctx){
return _.has(ctx, "query");
},
};
'eci': function (core, ctx) {
return _.get(ctx, ['event', 'eci'], _.get(ctx, ['query', 'eci']))
},
'rid': function (core, ctx) {
return ctx.rid
},
'host': function (core, ctx) {
return core.host
},
'picoId': function (core, ctx) {
// currently, this will be undefined durring ruleset registration
return ctx.pico_id
},
'txnId': function (core, ctx) {
return ctx.txn_id
},
'rulesetName': function (core, ctx) {
return _.get(core.rsreg.get(ctx.rid), ['meta', 'name'])
},
'rulesetDescription': function (core, ctx) {
return _.get(core.rsreg.get(ctx.rid), ['meta', 'description'])
},
'rulesetAuthor': function (core, ctx) {
return _.get(core.rsreg.get(ctx.rid), ['meta', 'author'])
},
'ruleName': function (core, ctx) {
return ctx.rule_name
},
'inEvent': function (core, ctx) {
return _.has(ctx, 'event')
},
'inQuery': function (core, ctx) {
return _.has(ctx, 'query')
}
}
module.exports = function(core){
return {
get: function(ctx, id, callback){
if(_.has(getCoreCTXval, id)){
callback(null, getCoreCTXval[id](core, ctx));
return;
}
if(id === "rulesetURI"){
core.db.getEnabledRuleset(ctx.rid, function(err, data){
if(err) return callback(err);
callback(null, data.url);
});
return;
}
callback(new Error("Meta attribute not defined `" + id + "`"));
}
};
};
module.exports = function (core) {
return {
get: function (ctx, id, callback) {
if (_.has(getCoreCTXval, id)) {
callback(null, getCoreCTXval[id](core, ctx))
return
}
if (id === 'rulesetURI') {
core.db.getEnabledRuleset(ctx.rid, function (err, data) {
if (err) return callback(err)
callback(null, data.url)
})
return
}
callback(new Error('Meta attribute not defined `' + id + '`'))
}
}
}

@@ -1,66 +0,66 @@

var _ = require("lodash");
var cuid = require("cuid");
var ktypes = require("krl-stdlib/types");
var mkKRLfn = require("../mkKRLfn");
var randomWords = require("random-words");
var _ = require('lodash')
var cuid = require('cuid')
var ktypes = require('krl-stdlib/types')
var mkKRLfn = require('../mkKRLfn')
var randomWords = require('random-words')
var fixLowerUpperArgs = function(args, round){
var lowerNum = ktypes.toNumberOrNull(args.lower);
if(round && lowerNum !== null){
lowerNum = _.round(lowerNum);
}
var fixLowerUpperArgs = function (args, round) {
var lowerNum = ktypes.toNumberOrNull(args.lower)
if (round && lowerNum !== null) {
lowerNum = _.round(lowerNum)
}
var upperNum = ktypes.toNumberOrNull(args.upper);
if(round && upperNum !== null){
upperNum = _.round(upperNum);
}
var upperNum = ktypes.toNumberOrNull(args.upper)
if (round && upperNum !== null) {
upperNum = _.round(upperNum)
}
var upper;
var upper
if(upperNum === null){
upper = lowerNum === null ? 1 : 0;
}else{
upper = upperNum;
}
if (upperNum === null) {
upper = lowerNum === null ? 1 : 0
} else {
upper = upperNum
}
return {
lower: lowerNum === null ? 0 : lowerNum,
upper: upper
};
};
return {
lower: lowerNum === null ? 0 : lowerNum,
upper: upper
}
}
module.exports = function(core){
return {
def: {
module.exports = function (core) {
return {
def: {
uuid: mkKRLfn([
], function(ctx, args, callback){
callback(null, cuid());
}),
uuid: mkKRLfn([
], function (ctx, args, callback) {
callback(null, cuid())
}),
word: mkKRLfn([
], function(ctx, args, callback){
callback(null, randomWords());
}),
word: mkKRLfn([
], function (ctx, args, callback) {
callback(null, randomWords())
}),
integer: mkKRLfn([
"upper",
"lower",
], function(ctx, args_orig, callback){
var args = fixLowerUpperArgs(args_orig, true);
integer: mkKRLfn([
'upper',
'lower'
], function (ctx, argsOrig, callback) {
var args = fixLowerUpperArgs(argsOrig, true)
callback(null, _.random(args.lower, args.upper));
}),
callback(null, _.random(args.lower, args.upper))
}),
number: mkKRLfn([
"upper",
"lower",
], function(ctx, args_orig, callback){
var args = fixLowerUpperArgs(args_orig);
number: mkKRLfn([
'upper',
'lower'
], function (ctx, argsOrig, callback) {
var args = fixLowerUpperArgs(argsOrig)
callback(null, _.random(args.lower, args.upper, true));
}),
callback(null, _.random(args.lower, args.upper, true))
})
}
};
};
}
}
}

@@ -1,85 +0,78 @@

var test = require("tape");
var cocb = require("co-callback");
var ktypes = require("krl-stdlib/types");
var krandom = require("./random")().def;
var test = require('tape')
var ktypes = require('krl-stdlib/types')
var krandom = require('./random')().def
var assertNumRange = function(n, low, high, should_be_int){
if(ktypes.isNumber(n) && (n >= low) && (n <= high)){
if(should_be_int && (n % 1 !== 0)){
throw new Error("not an int: " + n);
}
return true;
var assertNumRange = function (n, low, high, shouldBeInt) {
if (ktypes.isNumber(n) && (n >= low) && (n <= high)) {
if (shouldBeInt && (n % 1 !== 0)) {
throw new Error('not an int: ' + n)
}
throw new Error("invalid number range: " + low + " <= " + n + " <= " + high);
};
return true
}
throw new Error('invalid number range: ' + low + ' <= ' + n + ' <= ' + high)
}
test("module - random:*", function(t){
cocb.run(function*(){
var i;
for(i = 0; i < 5; i++){
t.ok(/^c[^\s]+$/.test(yield krandom.uuid({}, [])));
t.ok(/^[^\s]+$/.test(yield krandom.word({}, [])));
test('module - random:*', function (t) {
(async function () {
var i
for (i = 0; i < 5; i++) {
t.ok(/^c[^\s]+$/.test(await krandom.uuid({}, [])))
t.ok(/^[^\s]+$/.test(await krandom.word({}, [])))
}
}
// just throwup when there is a fail, so we don't polute the tap log with 100s of asserts
var n
for (i = 0; i < 100; i++) {
n = await krandom.integer({}, [])
assertNumRange(n, 0, 1, true)
//just throwup when there is a fail, so we don't polute the tap log with 100s of asserts
var n;
for(i = 0; i < 100; i++){
n = await krandom.integer({}, [0])
assertNumRange(n, 0, 0, true)
n = yield krandom.integer({}, []);
assertNumRange(n, 0, 1, true);
n = await krandom.integer({}, [10])
assertNumRange(n, 0, 10, true)
n = yield krandom.integer({}, [0]);
assertNumRange(n, 0, 0, true);
n = await krandom.integer({}, [-7])
assertNumRange(n, -7, 0, true)
n = yield krandom.integer({}, [10]);
assertNumRange(n, 0, 10, true);
n = await krandom.integer({}, [-3, 5])
assertNumRange(n, -3, 5, true)
n = yield krandom.integer({}, [-7]);
assertNumRange(n, -7, 0, true);
n = await krandom.integer({}, [-3, 'five'])
assertNumRange(n, -3, 0, true)
n = yield krandom.integer({}, [-3, 5]);
assertNumRange(n, -3, 5, true);
n = await krandom.integer({}, ['4.49', -8])
assertNumRange(n, -8, 4, true)
n = yield krandom.integer({}, [-3, "five"]);
assertNumRange(n, -3, 0, true);
n = await krandom.integer({}, ['four', -8.49])
assertNumRange(n, -8, 0, true)
n = yield krandom.integer({}, ["4.49", -8]);
assertNumRange(n, -8, 4, true);
n = await krandom.number({}, [])
assertNumRange(n, 0, 1)
n = yield krandom.integer({}, ["four", -8.49]);
assertNumRange(n, -8, 0, true);
n = await krandom.number({}, [0])
assertNumRange(n, 0, 0)
n = yield krandom.number({}, []);
assertNumRange(n, 0, 1);
n = await krandom.number({}, [7])
assertNumRange(n, 0, 7)
n = yield krandom.number({}, [0]);
assertNumRange(n, 0, 0);
n = await krandom.number({}, [-1.2])
assertNumRange(n, -1.2, 0)
n = yield krandom.number({}, [7]);
assertNumRange(n, 0, 7);
n = await krandom.number({}, [-3, 5])
assertNumRange(n, -3, 5)
n = yield krandom.number({}, [-1.2]);
assertNumRange(n, -1.2, 0);
n = await krandom.integer({}, [-3, 'five'])
assertNumRange(n, -3, 0, true)
n = yield krandom.number({}, [-3, 5]);
assertNumRange(n, -3, 5);
n = await krandom.integer({}, ['four', -8])
assertNumRange(n, -8, 0, true)
n = yield krandom.integer({}, [-3, "five"]);
assertNumRange(n, -3, 0, true);
n = yield krandom.integer({}, ["four", -8]);
assertNumRange(n, -8, 0, true);
n = yield krandom.number({}, [9.87, "-3.6"]);
assertNumRange(n, -3.6, 9.87);
}
//if an assert hasn't thrown up by now, we're good
t.ok(true, "random:integer passed");
t.ok(true, "random:number passed");
}, function(err){
t.end(err);
});
});
n = await krandom.number({}, [9.87, '-3.6'])
assertNumRange(n, -3.6, 9.87)
}
// if an assert hasn't thrown up by now, we're good
t.ok(true, 'random:integer passed')
t.ok(true, 'random:number passed')
}()).then(t.end).catch(t.end)
})

@@ -1,28 +0,27 @@

var mkKRLfn = require("../mkKRLfn");
var mkKRLaction = require("../mkKRLaction");
var mkKRLfn = require('../mkKRLfn')
var mkKRLaction = require('../mkKRLaction')
module.exports = function(core){
return {
def: {
list: mkKRLfn([
], function(ctx, args, callback){
core.db.listScheduled(callback);
}),
module.exports = function (core) {
return {
def: {
list: mkKRLfn([
], function (ctx, args, callback) {
core.db.listScheduled(callback)
}),
remove: mkKRLaction([
"id",
], function(ctx, args, callback){
remove: mkKRLaction([
'id'
], function (ctx, args, callback) {
// if it's a `repeat` we need to stop it
core.scheduler.rmCron(args.id)
//if it's a `repeat` we need to stop it
core.scheduler.rmCron(args.id);
core.db.removeScheduled(args.id, function(err){
if(err && !err.notFound) return callback(err);
//if event `at` we need to update the schedule
core.scheduler.update();
callback(null, err && err.notFound ? false : true);
});
}),
}
};
};
core.db.removeScheduled(args.id, function (err) {
if (err && !err.notFound) return callback(err)
// if event `at` we need to update the schedule
core.scheduler.update()
callback(null, !(err && err.notFound))
})
})
}
}
}

@@ -1,31 +0,21 @@

var test = require("tape");
var cocb = require("co-callback");
var mkTestPicoEngine = require("../mkTestPicoEngine");
var test = require('tape')
var mkTestPicoEngine = require('../mkTestPicoEngine')
test('schedule:remove', function (t) {
mkTestPicoEngine({}, function (err, pe) {
if (err) return t.end(err);
var testPE = function(test_name, genfn){
test(test_name, function(t){
mkTestPicoEngine({}, function(err, pe){
if(err) return t.end(err);
(async function () {
var remove = await pe.modules.get({}, 'schedule', 'remove')
cocb.run(function*(){
yield genfn(t, pe);
}, t.end);
});
});
};
var val = await pe.scheduleEventAtYieldable(new Date(), {
domain: 'd',
type: 't',
attributes: {}
})
testPE("schedule:remove", function*(t, pe){
var remove = yield pe.modules.get({}, "schedule", "remove");
var val = yield pe.scheduleEventAtYieldable(new Date(), {
domain: "d",
type: "t",
attributes: {},
});
t.deepEquals(yield remove({}, [val.id]), [true]);
t.deepEquals(yield remove({}, ["404"]), [false]);
});
t.deepEquals(await remove({}, [val.id]), [true])
t.deepEquals(await remove({}, ['404']), [false])
}()).then(t.end).catch(t.end)
})
})

@@ -1,117 +0,117 @@

var _ = require("lodash");
var moment = require("moment-timezone");
var ktypes = require("krl-stdlib/types");
var mkKRLfn = require("../mkKRLfn");
var strftime = require("strftime");
var _ = require('lodash')
var moment = require('moment-timezone')
var ktypes = require('krl-stdlib/types')
var mkKRLfn = require('../mkKRLfn')
var strftime = require('strftime')
var newDate = function(date_str, parse_utc){
var parse = function(str){
return parse_utc
? moment.utc(str, moment.ISO_8601)
: moment(str, moment.ISO_8601);
};
var d = parse(date_str);
if(!d.isValid()){
var today = (new Date()).toISOString().split("T")[0];
d = parse(today + "T" + date_str);
if(!d.isValid()){
d = parse(today.replace(/-/g, "") + "T" + date_str);
}
var newDate = function (dateStr, parseUtc) {
var parse = function (str) {
return parseUtc
? moment.utc(str, moment.ISO_8601)
: moment(str, moment.ISO_8601)
}
var d = parse(dateStr)
if (!d.isValid()) {
var today = (new Date()).toISOString().split('T')[0]
d = parse(today + 'T' + dateStr)
if (!d.isValid()) {
d = parse(today.replace(/-/g, '') + 'T' + dateStr)
}
if(!d.isValid()){
return null; // invalid date string date_str
}
return d;
};
}
if (!d.isValid()) {
return null // invalid date string dateStr
}
return d
}
module.exports = function(core){
return {
def: {
now: mkKRLfn([
"opts",
], function(ctx, args, callback){
var d = moment();
if(_.has(args, "opts")){
if(!ktypes.isMap(args.opts)){
return callback(new TypeError("time:now was given " + ktypes.toString(args.opts) + " instead of an opts map"));
}
if(_.has(args.opts, "tz")){
d.tz(args.opts.tz);
}
}
callback(null, d.toISOString());
}),
"new": mkKRLfn([
"date",
], function(ctx, args, callback){
if(!_.has(args, "date")){
return callback(new Error("time:new needs a date string"));
}
module.exports = function (core) {
return {
def: {
now: mkKRLfn([
'opts'
], function (ctx, args, callback) {
var d = moment()
if (_.has(args, 'opts')) {
if (!ktypes.isMap(args.opts)) {
return callback(new TypeError('time:now was given ' + ktypes.toString(args.opts) + ' instead of an opts map'))
}
if (_.has(args.opts, 'tz')) {
d.tz(args.opts.tz)
}
}
callback(null, d.toISOString())
}),
'new': mkKRLfn([
'date'
], function (ctx, args, callback) {
if (!_.has(args, 'date')) {
return callback(new Error('time:new needs a date string'))
}
var dateStr = ktypes.toString(args.date);
var d = newDate(dateStr, true);
if(d === null){
if(ktypes.isString(args.date)){
return callback(new Error("time:new was given an invalid date string (" + dateStr + ")"));
}
return callback(new TypeError("time:new was given " + ktypes.toString(dateStr) + " instead of a date string"));
}
callback(null, d.toISOString());
}),
"add": mkKRLfn([
"date",
"spec",
], function(ctx, args, callback){
if(!_.has(args, "date")){
return callback(new Error("time:add needs a date string"));
}
if(!_.has(args, "spec")){
return callback(new Error("time:add needs a spec map"));
}
var dateStr = ktypes.toString(args.date)
var d = newDate(dateStr, true)
if (d === null) {
if (ktypes.isString(args.date)) {
return callback(new Error('time:new was given an invalid date string (' + dateStr + ')'))
}
return callback(new TypeError('time:new was given ' + ktypes.toString(dateStr) + ' instead of a date string'))
}
callback(null, d.toISOString())
}),
'add': mkKRLfn([
'date',
'spec'
], function (ctx, args, callback) {
if (!_.has(args, 'date')) {
return callback(new Error('time:add needs a date string'))
}
if (!_.has(args, 'spec')) {
return callback(new Error('time:add needs a spec map'))
}
var dateStr = ktypes.toString(args.date);
var d = newDate(dateStr, true);
if(d === null){
if(ktypes.isString(args.date)){
return callback(new Error("time:add was given an invalid date string (" + dateStr + ")"));
}
return callback(new TypeError("time:add was given " + ktypes.toString(dateStr) + " instead of a date string"));
}
var dateStr = ktypes.toString(args.date)
var d = newDate(dateStr, true)
if (d === null) {
if (ktypes.isString(args.date)) {
return callback(new Error('time:add was given an invalid date string (' + dateStr + ')'))
}
return callback(new TypeError('time:add was given ' + ktypes.toString(dateStr) + ' instead of a date string'))
}
if(!ktypes.isMap(args.spec)){
return callback(new TypeError("time:add was given " + ktypes.toString(args.spec) + " instead of a spec map"));
}
if (!ktypes.isMap(args.spec)) {
return callback(new TypeError('time:add was given ' + ktypes.toString(args.spec) + ' instead of a spec map'))
}
d.add(args.spec);
d.add(args.spec)
callback(null, d.toISOString());
}),
"strftime": mkKRLfn([
"date",
"fmt",
], function(ctx, args, callback){
if(!_.has(args, "date")){
return callback(new Error("time:strftime needs a date string"));
}
if(!_.has(args, "fmt")){
return callback(new Error("time:strftime needs a fmt string"));
}
callback(null, d.toISOString())
}),
'strftime': mkKRLfn([
'date',
'fmt'
], function (ctx, args, callback) {
if (!_.has(args, 'date')) {
return callback(new Error('time:strftime needs a date string'))
}
if (!_.has(args, 'fmt')) {
return callback(new Error('time:strftime needs a fmt string'))
}
var dateStr = ktypes.toString(args.date);
var d = newDate(dateStr);
if(d === null){
if(ktypes.isString(args.date)){
return callback(new Error("time:strftime was given an invalid date string (" + dateStr + ")"));
}
return callback(new TypeError("time:strftime was given " + ktypes.toString(dateStr) + " instead of a date string"));
}
var dateStr = ktypes.toString(args.date)
var d = newDate(dateStr)
if (d === null) {
if (ktypes.isString(args.date)) {
return callback(new Error('time:strftime was given an invalid date string (' + dateStr + ')'))
}
return callback(new TypeError('time:strftime was given ' + ktypes.toString(dateStr) + ' instead of a date string'))
}
if(!ktypes.isString(args.fmt)){
return callback(new TypeError("time:strftime was given " + ktypes.toString(args.fmt) + " instead of a fmt string"));
}
if (!ktypes.isString(args.fmt)) {
return callback(new TypeError('time:strftime was given ' + ktypes.toString(args.fmt) + ' instead of a fmt string'))
}
callback(null, strftime(args.fmt, d.toDate()));
}),
}
};
};
callback(null, strftime(args.fmt, d.toDate()))
})
}
}
}

@@ -1,116 +0,114 @@

var test = require("tape");
var time = require("./time")().def;
var cocb = require("co-callback");
var testErr = require("../testErr");
var test = require('tape')
var time = require('./time')().def
var testErr = require('../testErr')
test("time module", function(t){
var terr = testErr(t, time);
test('time module', function (t) {
var terr = testErr(t, time);
cocb.run(function*(){
var ctx = {};
(async function () {
var ctx = {}
var now0 = yield time.now(ctx, []);
var now1 = yield time.now(ctx, [
{tz: "Australia/Sydney"}
]);
t.ok(/^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\dZ$/.test(now0));
t.ok(/^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\dZ$/.test(now1));
var now0 = await time.now(ctx, [])
var now1 = await time.now(ctx, [
{tz: 'Australia/Sydney'}
])
t.ok(/^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\dZ$/.test(now0))
t.ok(/^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\dZ$/.test(now1))
yield terr("now", ctx, [[]], "TypeError: time:now was given [Array] instead of an opts map");
await terr('now', ctx, [[]], 'TypeError: time:now was given [Array] instead of an opts map')
t.equals(
yield time["new"](ctx, ["2010-08-08"]),
"2010-08-08T00:00:00.000Z",
"Date only-defaults to 00:00"
);
t.equals(
await time['new'](ctx, ['2010-08-08']),
'2010-08-08T00:00:00.000Z',
'Date only-defaults to 00:00'
)
t.equals(
yield time["new"](ctx, ["1967342"]),
"1967-12-08T00:00:00.000Z",
"Year DayOfYear"
);
t.equals(
await time['new'](ctx, ['1967342']),
'1967-12-08T00:00:00.000Z',
'Year DayOfYear'
)
t.equals(
yield time["new"](ctx, [1967342]),
"1967-12-08T00:00:00.000Z",
"Year DayOfYear"
);
t.equals(
await time['new'](ctx, [1967342]),
'1967-12-08T00:00:00.000Z',
'Year DayOfYear'
)
t.equals(
yield time["new"](ctx, ["2011W206T1345-0600"]),
"2011-05-21T19:45:00.000Z",
"Year WeekOfYear DayOfWeek"
);
t.equals(
await time['new'](ctx, ['2011W206T1345-0600']),
'2011-05-21T19:45:00.000Z',
'Year WeekOfYear DayOfWeek'
)
t.equals(
yield time["new"](ctx, ["083023Z"]),
(new Date()).toISOString().split("T")[0] + "T08:30:23.000Z",
"Time only-defaults to today"
);
t.equals(
await time['new'](ctx, ['083023Z']),
(new Date()).toISOString().split('T')[0] + 'T08:30:23.000Z',
'Time only-defaults to today'
)
yield terr("new", ctx, [], "Error: time:new needs a date string");
yield terr("new", ctx, [67342], "TypeError: time:new was given 67342 instead of a date string");
yield terr("new", ctx, ["67342"], "Error: time:new was given an invalid date string (67342)");
await terr('new', ctx, [], 'Error: time:new needs a date string')
await terr('new', ctx, [67342], 'TypeError: time:new was given 67342 instead of a date string')
await terr('new', ctx, ['67342'], 'Error: time:new was given an invalid date string (67342)')
t.equals(
yield time["add"](ctx, ["2017-01-01", {years: -2017}]),
"0000-01-01T00:00:00.000Z"
);
t.equals(
yield time["add"](ctx, ["2017-01-01", {months: -22}]),
"2015-03-01T00:00:00.000Z"
);
t.equals(
yield time["add"](ctx, ["2010-08-08", {weeks: 5}]),
"2010-09-12T00:00:00.000Z"
);
t.equals(
yield time["add"](ctx, ["2010-08-08T05:00:00", {hours: 3}]),
"2010-08-08T08:00:00.000Z"
);
t.equals(
yield time["add"](ctx, ["2017-01-01", {days: -10}]),
"2016-12-22T00:00:00.000Z"
);
t.equals(
yield time["add"](ctx, ["2017-01-01", {minutes: 2, seconds: 90}]),
"2017-01-01T00:03:30.000Z"
);
t.equals(
await time['add'](ctx, ['2017-01-01', {years: -2017}]),
'0000-01-01T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2017-01-01', {months: -22}]),
'2015-03-01T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2010-08-08', {weeks: 5}]),
'2010-09-12T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2010-08-08T05:00:00', {hours: 3}]),
'2010-08-08T08:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2017-01-01', {days: -10}]),
'2016-12-22T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2017-01-01', {minutes: 2, seconds: 90}]),
'2017-01-01T00:03:30.000Z'
)
t.equals(
yield time["add"](ctx, [1967342, {"seconds": "five"}]),
"1967-12-08T00:00:00.000Z"
);
t.equals(
yield time["add"](ctx, [1967342, {"secondz": 5}]),
"1967-12-08T00:00:00.000Z"
);
t.equals(
await time['add'](ctx, [1967342, {'seconds': 'five'}]),
'1967-12-08T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, [1967342, {'secondz': 5}]),
'1967-12-08T00:00:00.000Z'
)
yield terr("add", ctx, {"spec": {}}, "Error: time:add needs a date string");
yield terr("add", ctx, [67342], "Error: time:add needs a spec map");
yield terr("add", ctx, [67342, 5], "TypeError: time:add was given 67342 instead of a date string");
yield terr("add", ctx, ["67342", 5], "Error: time:add was given an invalid date string (67342)");
yield terr("add", ctx, ["2017-01-01", []], "TypeError: time:add was given [Array] instead of a spec map");
await terr('add', ctx, {'spec': {}}, 'Error: time:add needs a date string')
await terr('add', ctx, [67342], 'Error: time:add needs a spec map')
await terr('add', ctx, [67342, 5], 'TypeError: time:add was given 67342 instead of a date string')
await terr('add', ctx, ['67342', 5], 'Error: time:add was given an invalid date string (67342)')
await terr('add', ctx, ['2017-01-01', []], 'TypeError: time:add was given [Array] instead of a spec map')
var xTime = "2010-10-06T18:25:55";
t.equals(
yield time["strftime"](ctx, [xTime, "%F %T"]),
"2010-10-06 18:25:55"
);
t.equals(
yield time["strftime"](ctx, [xTime, "%A %d %b %Y"]),
"Wednesday 06 Oct 2010"
);
t.equals(
yield time["strftime"](ctx, [xTime, "year month"]),
"year month"
);
var xTime = '2010-10-06T18:25:55'
t.equals(
await time['strftime'](ctx, [xTime, '%F %T']),
'2010-10-06 18:25:55'
)
t.equals(
await time['strftime'](ctx, [xTime, '%A %d %b %Y']),
'Wednesday 06 Oct 2010'
)
t.equals(
await time['strftime'](ctx, [xTime, 'year month']),
'year month'
)
yield terr("strftime", ctx, [], "Error: time:strftime needs a date string");
yield terr("strftime", ctx, [67342], "Error: time:strftime needs a fmt string");
yield terr("strftime", ctx, [67342, "%F %T"], "TypeError: time:strftime was given 67342 instead of a date string");
yield terr("strftime", ctx, ["67342", "%F %T"], "Error: time:strftime was given an invalid date string (67342)");
yield terr("strftime", ctx, ["1967342", ["%A %d %b %Y"]], "TypeError: time:strftime was given [Array] instead of a fmt string");
}, t.end);
});
await terr('strftime', ctx, [], 'Error: time:strftime needs a date string')
await terr('strftime', ctx, [67342], 'Error: time:strftime needs a fmt string')
await terr('strftime', ctx, [67342, '%F %T'], 'TypeError: time:strftime was given 67342 instead of a date string')
await terr('strftime', ctx, ['67342', '%F %T'], 'Error: time:strftime was given an invalid date string (67342)')
await terr('strftime', ctx, ['1967342', ['%A %d %b %Y']], 'TypeError: time:strftime was given [Array] instead of a fmt string')
}()).then(t.end).catch(t.end)
})

@@ -1,13 +0,13 @@

var _ = require("lodash");
var _ = require('lodash')
module.exports = function(param_order, krl_args){
var args = {};
_.each(krl_args, function(arg, key){
if(_.has(param_order, key)){
args[param_order[key]] = arg;
}else if(_.includes(param_order, key)){
args[key] = arg;
}
});
return args;
};
module.exports = function (paramOrder, krlArgs) {
var args = {}
_.each(krlArgs, function (arg, key) {
if (_.has(paramOrder, key)) {
args[paramOrder[key]] = arg
} else if (_.includes(paramOrder, key)) {
args[key] = arg
}
})
return args
}

@@ -1,27 +0,36 @@

var _ = require("lodash");
var async = require("async");
var _ = require('lodash')
var async = require('async')
module.exports = function(worker){
module.exports = function (worker) {
var queues = {}
var pico_queues = {};
var getQ = function (picoId) {
if (!_.has(queues, picoId)) {
var q = async.queue(function (job, done) {
job = JSON.parse(job)
worker(picoId, job.type, job.data)
.then(function (val) {
done(null, val)
})
.catch(function (err) {
process.nextTick(function () {
// wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
// when infact we are handling the rejection
done(err)
})
})
})
queues[picoId] = q
}
return queues[picoId]
}
var getQ = function(pico_id){
if(!_.has(pico_queues, pico_id)){
var q = async.queue(function(job, done){
job = JSON.parse(job);
worker(pico_id, job.type, job.data, done);
});
pico_queues[pico_id] = q;
}
return pico_queues[pico_id];
};
return {
enqueue: function(pico_id, type, data, callback){
getQ(pico_id).push(JSON.stringify({
type: type,
data: data,
}), callback);
}
};
};
return {
enqueue: function (picoId, type, data, callback) {
getQ(picoId).push(JSON.stringify({
type: type,
data: data
}), callback)
}
}
}

@@ -1,83 +0,82 @@

var test = require("tape");
var PicoQueue = require("./PicoQueue");
var test = require('tape')
var PicoQueue = require('./PicoQueue')
test("PicoQueue", function(t){
function nextTick () {
return new Promise(function (resolve) {
process.nextTick(resolve)
})
}
var log = [];
test('PicoQueue', function (t) {
var log = []
var pq = PicoQueue(function(pico_id, type, data, callback){
log.push("working_0 [" + pico_id + "] " + data);
process.nextTick(function(){
log.push("working_1 [" + pico_id + "] " + data);
process.nextTick(function(){
log.push("working_2 [" + pico_id + "] " + data);
callback();
});
});
});
var pq = PicoQueue(async function (picoId, type, data) {
log.push('working_0 [' + picoId + '] ' + data)
await nextTick()
log.push('working_1 [' + picoId + '] ' + data)
await nextTick()
log.push('working_2 [' + picoId + '] ' + data)
})
var enqueue = function(pico_id, data, done){
log.push("enqueue [" + pico_id + "] " + data);
pq.enqueue(pico_id, "test", data, function(){
log.push("done [" + pico_id + "] " + data);
if(done){
done();
}
});
};
var enqueue = function (picoId, data, done) {
log.push('enqueue [' + picoId + '] ' + data)
pq.enqueue(picoId, 'test', data, function () {
log.push('done [' + picoId + '] ' + data)
if (done) {
done()
}
})
}
enqueue('A', 0)
enqueue('A', 1)
enqueue('B', 0)
enqueue('A', 2, function () {
t.deepEquals(log, [
'enqueue [A] 0',
'enqueue [A] 1',
'enqueue [B] 0',
'enqueue [A] 2',
'working_0 [A] 0',
'working_0 [B] 0', // different picos can be concurrent
'working_1 [A] 0',
'working_1 [B] 0',
'working_2 [A] 0', // Now pico A finished work on event 0
'working_2 [B] 0',
'done [A] 0',
'working_0 [A] 1', // Now pico A can start on event 1
'done [B] 0',
'working_1 [A] 1',
'working_2 [A] 1',
'done [A] 1',
'working_0 [A] 2',
'working_1 [A] 2',
'working_2 [A] 2',
'done [A] 2'
])
t.end()
})
})
enqueue("A", 0);
enqueue("A", 1);
enqueue("B", 0);
enqueue("A", 2, function(){
t.deepEquals(log, [
"enqueue [A] 0",
"enqueue [A] 1",
"enqueue [B] 0",
"enqueue [A] 2",
"working_0 [A] 0",
"working_0 [B] 0",//different picos can be concurrent
"working_1 [A] 0",
"working_1 [B] 0",
"working_2 [A] 0",//Now pico A finished work on event 0
"done [A] 0",
"working_0 [A] 1",//Now pico A can start on event 1
"working_2 [B] 0",
"done [B] 0",
"working_1 [A] 1",
"working_2 [A] 1",
"done [A] 1",
"working_0 [A] 2",
"working_1 [A] 2",
"working_2 [A] 2",
"done [A] 2"
]);
t.end();
});
});
test("PicoQueue - error", function(t){
var pq = PicoQueue(function(pico_id, type, data, callback){
process.nextTick(function(){
if(data === "foobar"){
callback(new Error(data));
return;
}
callback(null, data);
});
});
t.plan(6);
pq.enqueue("A", "test", "baz", function(err, data){
t.equals(err, null);
t.equals(data, "baz");
});
pq.enqueue("A", "test", "foobar", function(err, data){
t.equals(err + "", "Error: foobar");
t.equals(data, void 0);
});
pq.enqueue("A", "test", "qux", function(err, data){
t.equals(err, null);
t.equals(data, "qux");
});
});
test('PicoQueue - error', function (t) {
var pq = PicoQueue(async function (picoId, type, data) {
await nextTick()
if (data === 'foobar') {
throw new Error(data)
}
return data
})
t.plan(6)
pq.enqueue('A', 'test', 'baz', function (err, data) {
t.equals(err, null)
t.equals(data, 'baz')
})
pq.enqueue('A', 'test', 'foobar', function (err, data) {
t.equals(err + '', 'Error: foobar')
t.equals(data, void 0)
})
pq.enqueue('A', 'test', 'qux', function (err, data) {
t.equals(err, null)
t.equals(data, 'qux')
})
})

@@ -1,69 +0,60 @@

var _ = require("lodash");
var cocb = require("co-callback");
var ktypes = require("krl-stdlib/types");
var runKRL = require("./runKRL");
var runAction = require("./runAction");
var selectRulesToEval = require("./selectRulesToEval");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
var runKRL = require('./runKRL')
var runAction = require('./runAction')
var selectRulesToEval = require('./selectRulesToEval')
var scheduleEvent = function(core, ctx, args, callback){
if(!_.has(ctx, ["event", "eci"])){
callback(new Error("schedule:event must be executed in response to an event"));
return;
}
var event = {
eci: ctx.event.eci,//in theory we are only running in an event postlude
eid: ctx.event.eid,
domain: args.domain,
type: args.type,
attrs: args.attributes,
};
async function scheduleEvent (core, ctx, args) {
if (!_.has(ctx, ['event', 'eci'])) {
throw new Error('schedule:event must be executed in response to an event')
}
var event = {
eci: ctx.event.eci, // in theory we are only running in an event postlude
eid: ctx.event.eid,
domain: args.domain,
type: args.type,
attrs: args.attributes
}
if(false
|| args.at !== void 0 && args.timespec !== void 0
|| args.at === void 0 && args.timespec === void 0
){
callback(new Error("schedule:event must use `at` -or- `timespec`"));
return;
if (false ||
(args.at !== void 0 && args.timespec !== void 0) ||
(args.at === void 0 && args.timespec === void 0)
) {
throw new Error('schedule:event must use `at` -or- `timespec`')
}
var val
if (args.at !== void 0) {
var at = new Date(args.at)
if (at.toISOString() !== args.at) {
throw new Error('schedule:event at must be an ISO date string (i.e. `.toISOString()`)')
}
if(args.at !== void 0){
var at = new Date(args.at);
if(at.toISOString() !== args.at){
callback(new Error("schedule:event at must be an ISO date string (i.e. `.toISOString()`)"));
return;
}
core.db.scheduleEventAt(at, event, function(err, val){
if(err) return callback(err);
core.scheduler.update();
callback(null, val.id);
});
return;
}
if(!_.isString(args.timespec)){
//TODO parse it to ensure it's shaped right
callback(new Error("schedule:event `timespec` must be a cron format string"));
return;
}
core.db.scheduleEventRepeat(args.timespec, event, function(err, val){
if(err) return callback(err);
core.scheduler.addCron(val.timespec, val.id, val.event);
callback(null, val.id);
});
};
val = await core.db.scheduleEventAtYieldable(at, event)
core.scheduler.update()
return val.id
}
if (!_.isString(args.timespec)) {
// TODO parse it to ensure it's shaped right
throw new Error('schedule:event `timespec` must be a cron format string')
}
val = await core.db.scheduleEventRepeatYieldable(args.timespec, event)
core.scheduler.addCron(val.timespec, val.id, val.event)
return val.id
}
var toResponse = function(ctx, type, val){
if(type === "directive"){
return {
type: "directive",
options: val.options,
name: val.name,
meta: {
rid: ctx.rid,
rule_name: ctx.rule_name,
txn_id: ctx.txn_id,
eid: ctx.event.eid
}
};
function toResponse (ctx, type, val) {
if (type === 'directive') {
return {
type: 'directive',
options: val.options,
name: val.name,
meta: {
rid: ctx.rid,
rule_name: ctx.rule_name,
txn_id: ctx.txn_id,
eid: ctx.event.eid
}
}
throw new Error("Unsupported action response type: " + type);
};
}
throw new Error('Unsupported action response type: ' + type)
}

@@ -74,174 +65,155 @@ /**

*/
function toPairs(v){
if(ktypes.isArray(v)){
var pairs = [];
var i;
for(i = 0; i < v.length; i++){
pairs.push([i, v[i]]);
}
return pairs;
function toPairs (v) {
if (ktypes.isArray(v)) {
var pairs = []
var i
for (i = 0; i < v.length; i++) {
pairs.push([i, v[i]])
}
return _.toPairs(v);
return pairs
}
return _.toPairs(v)
}
function runRuleBody (core, ruleBodyFns, scheduled) {
var rule = scheduled.rule
var picoId = scheduled.pico_id
var event = scheduled.event
var runRuleBody = cocb.wrap(function*(core, rule_body_fns, scheduled){
var ctx = core.mkCTX({
rid: rule.rid,
rule_name: rule.name,
scope: rule.scope,
pico_id: picoId,
event: event,
var rule = scheduled.rule;
var pico_id = scheduled.pico_id;
var event = scheduled.event;
raiseEvent: ruleBodyFns.raiseEvent,
raiseError: ruleBodyFns.raiseError,
scheduleEvent: ruleBodyFns.scheduleEvent,
addActionResponse: ruleBodyFns.addActionResponse,
stopRulesetExecution: ruleBodyFns.stopRulesetExecution
})
var ctx = core.mkCTX({
rid: rule.rid,
rule_name: rule.name,
scope: rule.scope,
pico_id: pico_id,
event: event,
ctx.emit('debug', 'rule selected: ' + rule.rid + ' -> ' + rule.name)
raiseEvent: rule_body_fns.raiseEvent,
raiseError: rule_body_fns.raiseError,
scheduleEvent: rule_body_fns.scheduleEvent,
addActionResponse: rule_body_fns.addActionResponse,
stopRulesetExecution: rule_body_fns.stopRulesetExecution,
});
return runKRL(rule.body, ctx, runAction, toPairs)
}
ctx.emit("debug", "rule selected: " + rule.rid + " -> " + rule.name);
module.exports = async function processEvent (core, ctx) {
ctx.emit('debug', 'event being processed')
yield runKRL(rule.body, ctx, runAction, toPairs);
});
// the schedule is the list of rules and events that need to be processed
var schedule = []
var responses = []// i.e. directives
var processEvent = cocb.wrap(function*(core, ctx){
ctx.emit("debug", "event being processed");
var addEventToSchedule = async function (ctx) {
var rules = await selectRulesToEval(core, ctx)
_.each(rules, function (rule) {
ctx.emit('debug', 'rule added to schedule: ' + rule.rid + ' -> ' + rule.name)
schedule.push({
rule: rule,
event: ctx.event,
pico_id: ctx.pico_id
})
})
if (schedule.length === 0) {
ctx.emit('debug', 'no rules added to schedule')
}
}
//the schedule is the list of rules and events that need to be processed
var schedule = [];
var responses = [];//i.e. directives
await addEventToSchedule(ctx)
var addEventToSchedule = function(ctx, callback){
selectRulesToEval(core, ctx, function(err, rules){
if(err) return callback(err);
_.each(rules, function(rule){
ctx.emit("debug", "rule added to schedule: " + rule.rid + " -> " + rule.name);
schedule.push({
rule: rule,
event: ctx.event,
pico_id: ctx.pico_id,
});
});
if(schedule.length === 0){
ctx.emit("debug", "no rules added to schedule");
}
callback();
});
};
// these are special functions only to be used inside a rule body
var ruleBodyFns = {
raiseEvent: async function (revent) {
// shape the revent like a normal event
var event = {
eci: ctx.event.eci, // raise event is always to the same pico
eid: ctx.event.eid, // inherit from parent event to aid in debugging
domain: revent.domain,
type: revent.type,
attrs: revent.attributes,
for_rid: revent.for_rid,
txn_id: ctx.event.txn_id, // inherit from parent event
timestamp: new Date()
}
// must make a new ctx for this raise b/c it's a different event
var raiseCtx = core.mkCTX({
event: event,
pico_id: ctx.pico_id// raise event is always to the same pico
})
raiseCtx.emit('debug', 'adding raised event to schedule: ' + revent.domain + '/' + revent.type)
await addEventToSchedule(raiseCtx)
},
raiseError: function (ctx, level, data) {
if (level === 'error') {
// clear the schedule so no more rules are run
schedule = []
}
yield cocb.wrap(addEventToSchedule)(ctx);
//these are special functions only to be used inside a rule body
var rule_body_fns = {
raiseEvent: cocb.wrap(function(revent, callback){
//shape the revent like a normal event
var event = {
eci: ctx.event.eci,//raise event is always to the same pico
eid: ctx.event.eid,//inherit from parent event to aid in debugging
domain: revent.domain,
type: revent.type,
attrs: revent.attributes,
for_rid: revent.for_rid,
txn_id: ctx.event.txn_id,//inherit from parent event
timestamp: new Date()
};
//must make a new ctx for this raise b/c it's a different event
var raise_ctx = core.mkCTX({
event: event,
pico_id: ctx.pico_id,//raise event is always to the same pico
});
raise_ctx.emit("debug", "adding raised event to schedule: " + revent.domain + "/" + revent.type);
addEventToSchedule(raise_ctx, callback);
}),
raiseError: function*(ctx, level, data){
if(level === "error"){
//clear the schedule so no more rules are run
schedule = [];
}
return yield ctx.raiseEvent({
domain: "system",
type: "error",
attributes: {
level: level,
data: data,
rid: ctx.rid,
rule_name: ctx.rule_name,
genus: "user",
//species: ??,
},
for_rid: ctx.rid,
});
return ctx.raiseEvent({
domain: 'system',
type: 'error',
attributes: {
level: level,
data: data,
rid: ctx.rid,
rule_name: ctx.rule_name,
genus: 'user'
// species: ??,
},
scheduleEvent: cocb.wrap(function(sevent, callback){
scheduleEvent(core, ctx, {
domain: sevent.domain,
type: sevent.type,
attributes: sevent.attributes,
for_rid: ctx.rid
})
},
scheduleEvent: function (sevent) {
return scheduleEvent(core, ctx, {
domain: sevent.domain,
type: sevent.type,
attributes: sevent.attributes,
at: sevent.at,
timespec: sevent.timespec,
}, callback);
}),
addActionResponse: function(ctx, type, val){
var resp = toResponse(ctx, type, val);
responses.push(resp);
return resp;
},
stopRulesetExecution: function(ctx){
ctx.emit("debug", "`last` control statement is stopping ruleset execution");
schedule = _.dropWhile(schedule, function(s){
return s.rule.rid === ctx.rid;
});
},
};
//using a while loop b/c schedule is MUTABLE
//Durring execution new events may be `raised` that will mutate the schedule
while(schedule.length > 0){
yield runRuleBody(core, rule_body_fns, schedule.shift());
at: sevent.at,
timespec: sevent.timespec
})
},
addActionResponse: function (ctx, type, val) {
var resp = toResponse(ctx, type, val)
responses.push(resp)
return resp
},
stopRulesetExecution: function (ctx) {
ctx.emit('debug', '`last` control statement is stopping ruleset execution')
schedule = _.dropWhile(schedule, function (s) {
return s.rule.rid === ctx.rid
})
}
}
var res_by_type = _.groupBy(responses, "type");
// using a while loop b/c schedule is MUTABLE
// Durring execution new events may be `raised` that will mutate the schedule
while (schedule.length > 0) {
await runRuleBody(core, ruleBodyFns, schedule.shift())
}
var r = _.mapValues(res_by_type, function(responses, key){
if(key === "directive"){
return _.map(responses, function(d){
return _.omit(d, "type");
});
}
return responses;
});
var resByType = _.groupBy(responses, 'type')
if(_.has(r, "directive")){
r.directives = r.directive;
delete r.directive;
}else{
//we always want to return a directives array even if it's empty
r.directives = [];
var r = _.mapValues(resByType, function (responses, key) {
if (key === 'directive') {
return _.map(responses, function (d) {
return _.omit(d, 'type')
})
}
return responses
})
ctx.emit("debug", "event finished processing");
if (_.has(r, 'directive')) {
r.directives = r.directive
delete r.directive
} else {
// we always want to return a directives array even if it's empty
r.directives = []
}
return r;
});
ctx.emit('debug', 'event finished processing')
module.exports = function(core, ctx, callback){
processEvent(core, ctx).then(function(data){
callback(null, data);
}, function(err){
process.nextTick(function(){
//wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
//when infact we are handling the rejection
callback(err);
});
});
};
return r
}

@@ -1,61 +0,47 @@

var _ = require("lodash");
var cocb = require("co-callback");
var runKRL = require("./runKRL");
var ktypes = require("krl-stdlib/types");
var _ = require('lodash')
var runKRL = require('./runKRL')
var ktypes = require('krl-stdlib/types')
module.exports = function(core, ctx, callback){
cocb.run(function*(){
module.exports = async function processQuery (core, ctx) {
await core.db.assertPicoIDYieldable(ctx.pico_id)
yield core.db.assertPicoIDYieldable(ctx.pico_id);
var picoRids = await core.db.ridsOnPicoYieldable(ctx.pico_id)
if (picoRids[ctx.query.rid] !== true) {
throw new Error('Pico does not have that rid: ' + ctx.query.rid)
}
var pico_rids = yield core.db.ridsOnPicoYieldable(ctx.pico_id);
if(pico_rids[ctx.query.rid] !== true){
throw new Error("Pico does not have that rid: " + ctx.query.rid);
}
var err
var rs = core.rsreg.get(ctx.query.rid)
if (!rs) {
err = new Error('RID not found: ' + ctx.query.rid)
err.notFound = true
throw err
}
var shares = _.get(rs, ['meta', 'shares'])
if (!_.isArray(shares) || !_.includes(shares, ctx.query.name)) {
throw new Error('Not shared: ' + ctx.query.name)
}
if (!rs.scope.has(ctx.query.name)) {
err = new Error('Shared, but not defined: ' + ctx.query.name)
err.notFound = true
throw err
}
var err;
var rs = core.rsreg.get(ctx.query.rid);
if(!rs){
err = new Error("RID not found: " + ctx.query.rid);
err.notFound = true;
throw err;
}
var shares = _.get(rs, ["meta", "shares"]);
if(!_.isArray(shares) || !_.includes(shares, ctx.query.name)){
throw new Error("Not shared: " + ctx.query.name);
}
if(!rs.scope.has(ctx.query.name)){
err = new Error("Shared, but not defined: " + ctx.query.name);
err.notFound = true;
throw err;
}
////////////////////////////////////////////////////////////////////////
ctx = core.mkCTX({
query: ctx.query,
pico_id: ctx.pico_id,
rid: rs.rid,
scope: rs.scope,
});
var val = ctx.scope.get(ctx.query.name);
if(_.isFunction(val)){
val = yield runKRL(function*(ctx, args){
//use ctx.applyFn so it behaves like any other fn call
//i.e. errors on trying defaction like a function
return yield ctx.applyFn(val, ctx, args);
}, ctx, ctx.query.args);
}
// To ensure we don't leak out functions etc.
return ktypes.decode(ktypes.encode(val));
}, function(err, val){
if(err){
process.nextTick(function(){
//wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
//when infact we are handling the rejection
callback(err);
});
return;
}
callback(null, val);
});
};
/// /////////////////////////////////////////////////////////////////////
ctx = core.mkCTX({
query: ctx.query,
pico_id: ctx.pico_id,
rid: rs.rid,
scope: rs.scope
})
var val = ctx.scope.get(ctx.query.name)
if (_.isFunction(val)) {
val = await runKRL(function (ctx, args) {
// use ctx.applyFn so it behaves like any other fn call
// i.e. errors on trying defaction like a function
return ctx.applyFn(val, ctx, args)
}, ctx, ctx.query.args)
}
// To ensure we don't leak out functions etc.
return ktypes.decode(ktypes.encode(val))
}

@@ -1,126 +0,125 @@

var _ = require("lodash");
var _ = require('lodash')
var SalienceGraph = function(){
var graph = {};
var put = function(rs){
del(rs.rid);//clear out the old one, if pressent
_.each(rs.rules, function(rule){
rule.rid = rs.rid;
_.each(rule.select && rule.select.graph, function(g, domain){
_.each(g, function(exprs, type){
_.set(graph, [domain, type, rule.rid, rule.name], true);
});
});
});
};
var get = function(domain, type){
return _.get(graph, [domain, type], {});
};
var del = function(rid){
_.each(graph, function(data_d, domain){
_.each(data_d, function(data_t, type){
//clear out any old versions graph
_.unset(graph, [domain, type, rid]);
});
});
};
return Object.freeze({
put: put,
get: get,
del: del,
});
};
var SalienceGraph = function () {
var graph = {}
var put = function (rs) {
del(rs.rid)// clear out the old one, if pressent
_.each(rs.rules, function (rule) {
rule.rid = rs.rid
_.each(rule.select && rule.select.graph, function (g, domain) {
_.each(g, function (exprs, type) {
_.set(graph, [domain, type, rule.rid, rule.name], true)
})
})
})
}
var get = function (domain, type) {
return _.get(graph, [domain, type], {})
}
var del = function (rid) {
_.each(graph, function (dataD, domain) {
_.each(dataD, function (dataT, type) {
// clear out any old versions graph
_.unset(graph, [domain, type, rid])
})
})
}
return Object.freeze({
put: put,
get: get,
del: del
})
}
module.exports = function(){
module.exports = function () {
var rulesets = {}
var salienceGraph = SalienceGraph()
var keysModuleData = {}
var rulesets = {};
var salience_graph = SalienceGraph();
var keys_module_data = {};
return Object.freeze({
get: function (rid) {
return rulesets[rid]
},
put: function (rs) {
if (true &&
_.has(rs, 'meta.keys') &&
_.has(rs, 'meta.provides_keys')
) {
_.each(rs.meta.provides_keys, function (p, key) {
_.each(p.to, function (toRid) {
_.set(keysModuleData, [
'provided',
rs.rid,
toRid,
key
], _.cloneDeep(rs.meta.keys[key]))
})
})
}
return Object.freeze({
get: function(rid){
return rulesets[rid];
},
put: function(rs){
if(true
&& _.has(rs, "meta.keys")
&& _.has(rs, "meta.provides_keys")
){
_.each(rs.meta.provides_keys, function(p, key){
_.each(p.to, function(to_rid){
_.set(keys_module_data, [
"provided",
rs.rid,
to_rid,
key
], _.cloneDeep(rs.meta.keys[key]));
});
});
}
if (_.has(rs, 'meta.keys')) {
// "remove" keys so they don't leak out
// don't use delete b/c it mutates the loaded rs
rs = _.assign({}, rs, {
meta: _.omit(rs.meta, 'keys')
})
}
if(_.has(rs, "meta.keys")){
//"remove" keys so they don't leak out
//don't use delete b/c it mutates the loaded rs
rs = _.assign({}, rs, {
meta: _.omit(rs.meta, "keys")
});
}
salience_graph.put(rs);
rulesets[rs.rid] = rs;
},
del: function(rid){
salience_graph.del(rid);
delete rulesets[rid];
},
setupOwnKeys: function(rs){
if(rs.meta && rs.meta.keys){
_.each(rs.meta.keys, function(value, key){
_.set(keys_module_data, ["used_keys", rs.rid, key], value);
});
}
},
provideKey: function(rid, use_rid){
if(_.has(keys_module_data, ["provided", use_rid, rid])){
_.each(keys_module_data.provided[use_rid][rid], function(value, key){
_.set(keys_module_data, ["used_keys", rid, key], value);
});
}
},
getKey: function(rid, key_id){
return _.get(keys_module_data, ["used_keys", rid, key_id]);
},
salientRules: function(domain, type, ridFilter){
var to_run = salience_graph.get(domain, type);
var rules_to_select = [];
_.each(to_run, function(rules, rid){
if(!ridFilter(rid)){
return;
}
_.each(rules, function(is_on, rule_name){
if(!is_on){
return;
}
var rule = _.get(rulesets, [rid, "rules", rule_name]);
if(!rule){
return;
}
//shallow clone with it's own scope for this run
rules_to_select.push(_.assign({}, rule, {
scope: rulesets[rid].scope.push()
}));
});
});
return rules_to_select;
},
assertNoDependants: function(rid){
_.each(rulesets, function(rs){
_.each(rs.modules_used, function(info){
if(info.rid === rid){
throw new Error("\"" + rid + "\" is depended on by \"" + rs.rid + "\"");
}
});
});
},
});
};
salienceGraph.put(rs)
rulesets[rs.rid] = rs
},
del: function (rid) {
salienceGraph.del(rid)
delete rulesets[rid]
},
setupOwnKeys: function (rs) {
if (rs.meta && rs.meta.keys) {
_.each(rs.meta.keys, function (value, key) {
_.set(keysModuleData, ['used_keys', rs.rid, key], value)
})
}
},
provideKey: function (rid, useRid) {
if (_.has(keysModuleData, ['provided', useRid, rid])) {
_.each(keysModuleData.provided[useRid][rid], function (value, key) {
_.set(keysModuleData, ['used_keys', rid, key], value)
})
}
},
getKey: function (rid, keyId) {
return _.get(keysModuleData, ['used_keys', rid, keyId])
},
salientRules: function (domain, type, ridFilter) {
var toRun = salienceGraph.get(domain, type)
var rulesToSelect = []
_.each(toRun, function (rules, rid) {
if (!ridFilter(rid)) {
return
}
_.each(rules, function (isOn, ruleName) {
if (!isOn) {
return
}
var rule = _.get(rulesets, [rid, 'rules', ruleName])
if (!rule) {
return
}
// shallow clone with it's own scope for this run
rulesToSelect.push(_.assign({}, rule, {
scope: rulesets[rid].scope.push()
}))
})
})
return rulesToSelect
},
assertNoDependants: function (rid) {
_.each(rulesets, function (rs) {
_.each(rs.modules_used, function (info) {
if (info.rid === rid) {
throw new Error('"' + rid + '" is depended on by "' + rs.rid + '"')
}
})
})
}
})
}

@@ -1,56 +0,55 @@

var _ = require("lodash");
var cocb = require("co-callback");
var ktypes = require("krl-stdlib/types");
var mkKRLaction = require("./mkKRLaction");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
var mkKRLaction = require('./mkKRLaction')
var send_directive = mkKRLaction([
"name",
"options",
], function(ctx, args, callback){
if(!_.has(args, "name")){
return callback(new Error("send_directive needs a name string"));
var sendDirective = mkKRLaction([
'name',
'options'
], function (ctx, args, callback) {
if (!_.has(args, 'name')) {
return callback(new Error('send_directive needs a name string'))
}
if (!ktypes.isString(args.name)) {
return callback(new TypeError('send_directive was given ' + ktypes.toString(args.name) + ' instead of a name string'))
}
if (!_.has(args, 'options')) {
args.options = {}
} else if (!ktypes.isMap(args.options)) {
return callback(new TypeError('send_directive was given ' + ktypes.toString(args.options) + ' instead of an options map'))
}
callback(null, ctx.addActionResponse(ctx, 'directive', {
name: args.name,
options: args.options
}))
})
module.exports = async function runAction (ctx, domain, id, args, setting) {
var returns = []
if (domain) {
var modAction = await ctx.modules.get(ctx, domain, id)
if (!ktypes.isAction(modAction)) {
throw new Error('`' + domain + ':' + id + '` is not an action')
}
if(!ktypes.isString(args.name)){
return callback(new TypeError("send_directive was given " + ktypes.toString(args.name) + " instead of a name string"));
returns = await modAction(ctx, args)
} else if (id === 'noop') {
returns = []// returns nothing
} else if (ctx.scope.has(id)) {
var definedAction = ctx.scope.get(id)
if (!ktypes.isAction(definedAction)) {
throw new Error('`' + id + '` is not defined as an action')
}
if(!_.has(args, "options")){
args.options = {};
}else if(!ktypes.isMap(args.options)){
return callback(new TypeError("send_directive was given " + ktypes.toString(args.options) + " instead of an options map"));
returns = await definedAction(ctx, args)
} else if (id === 'send_directive' || id === 'sendDirective') {
returns = await sendDirective(ctx, args)
} else {
throw new Error('`' + id + '` is not defined')
}
_.each(setting, function (id, i) {
var val = returns[i]
if (val === void 0 || _.isNaN(val)) {
val = null
}
callback(null, ctx.addActionResponse(ctx, "directive", {
name: args.name,
options: args.options
}));
});
module.exports = cocb.wrap(function*(ctx, domain, id, args, setting){
var returns = [];
if(domain){
var modAction = yield ctx.modules.get(ctx, domain, id);
if( ! ktypes.isAction(modAction)){
throw new Error("`" + domain + ":" + id + "` is not an action");
}
returns = yield modAction(ctx, args);
}else if(id === "noop"){
returns = [];//returns nothing
}else if(ctx.scope.has(id)){
var definedAction = ctx.scope.get(id);
if( ! ktypes.isAction(definedAction)){
throw new Error("`" + id + "` is not defined as an action");
}
returns = yield definedAction(ctx, args);
}else if(id === "send_directive" || id === "sendDirective"){
returns = yield send_directive(ctx, args);
}else{
throw new Error("`" + id + "` is not defined");
}
_.each(setting, function(id, i){
var val = returns[i];
if(val === void 0 || _.isNaN(val)){
val = null;
}
ctx.scope.set(id, val);
});
});
ctx.scope.set(id, val)
})
}

@@ -1,58 +0,57 @@

var _ = require("lodash");
var test = require("tape");
var cocb = require("co-callback");
var runAction = require("./runAction");
var _ = require('lodash')
var test = require('tape')
var runAction = require('./runAction')
test("runAction - send_directive", function(t){
var mkCtx = function(name, options){
return {
addActionResponse: function(ctx, type, val){
t.equals(val.name, name);
t.ok(_.isEqual(val.options, options));//to t.deepEqual, [] == {}
},
scope: {
has: _.noop
}
};
};
test('runAction - send_directive', function (t) {
var mkCtx = function (name, options) {
return {
addActionResponse: function (ctx, type, val) {
t.equals(val.name, name)
t.ok(_.isEqual(val.options, options))// to t.deepEqual, [] == {}
},
scope: {
has: _.noop
}
}
}
var noopCtx = {
addActionResponse: _.noop,
scope: {
has: _.noop
}
};
var noopCtx = {
addActionResponse: _.noop,
scope: {
has: _.noop
}
}
var testFn = function*(args, name, options){
var ctx = mkCtx(name, options);
yield runAction(ctx, void 0, "send_directive", _.cloneDeep(args), []);
};
var testFn = async function (args, name, options) {
var ctx = mkCtx(name, options)
await runAction(ctx, void 0, 'send_directive', _.cloneDeep(args), [])
}
var testErr = function*(args, error){
try{
yield runAction(noopCtx, void 0, "send_directive", args, []);
t.fail("Failed to throw an error");
}catch(err){
t.equals(err + "", error);
}
};
var testErr = async function (args, error) {
try {
await runAction(noopCtx, void 0, 'send_directive', args, [])
t.fail('Failed to throw an error')
} catch (err) {
t.equals(err + '', error)
}
}
var str = "post";
var map = {"don't": "mutate"};
var str = 'post'
var map = {"don't": 'mutate'}
var errMsg1 = "Error: send_directive needs a name string";
var errMsg2 = "TypeError: send_directive was given [Map] instead of a name string";
var errMsg1 = 'Error: send_directive needs a name string'
var errMsg2 = 'TypeError: send_directive was given [Map] instead of a name string';
cocb.run(function*(){
yield testFn([str, map], str, map);
yield testFn([str], str, {});
(async function () {
await testFn([str, map], str, map)
await testFn([str], str, {})
yield testErr([], errMsg1);
yield testErr({"options": null}, errMsg1);
yield testErr([map], errMsg2);
yield testErr([map, map], errMsg2);
yield testErr([map, str], errMsg2);
yield testErr([str, void 0], "TypeError: send_directive was given null instead of an options map");
yield testErr([str, []], "TypeError: send_directive was given [Array] instead of an options map");
}, t.end);
});
await testErr([], errMsg1)
await testErr({'options': null}, errMsg1)
await testErr([map], errMsg2)
await testErr([map, map], errMsg2)
await testErr([map, str], errMsg2)
await testErr([str, void 0], 'TypeError: send_directive was given null instead of an options map')
await testErr([str, []], 'TypeError: send_directive was given [Array] instead of an options map')
}()).then(t.end).catch(t.end)
})

@@ -1,80 +0,76 @@

var _ = require("lodash");
var cocb = require("co-callback");
var _ = require('lodash')
var assertCTX_keys = function(ctx, keys){
var std_ctx_keys = [
"rid",
"scope",
"txn_id",
"getMyKey",
"modules",
"mkFunction",
"emit",
"log",
"callKRLstdlib",
"mkAction",
"applyFn",
];
function assertCTXkeys (ctx, keys) {
var stdCtxKeys = [
'rid',
'scope',
'txn_id',
'getMyKey',
'modules',
'mkFunction',
'emit',
'log',
'callKRLstdlib',
'mkAction',
'applyFn'
]
var expected = _.cloneDeep(keys).sort().join(",");
var actual = _.pullAll(_.map(ctx, function(v, k){
if(v === void 0 || v === null || _.isNaN(v)){
throw new Error("Invalid ctx." + k + " is not defined");
}
return k;
}), std_ctx_keys).sort().join(",");
if(actual !== expected){
throw new Error("Invalid ctx expected " + expected + " but was " + actual);
var expected = _.cloneDeep(keys).sort().join(',')
var actual = _.pullAll(_.map(ctx, function (v, k) {
if (v === void 0 || v === null || _.isNaN(v)) {
throw new Error('Invalid ctx.' + k + ' is not defined')
}
};
return k
}), stdCtxKeys).sort().join(',')
module.exports = function(){
var args = Array.prototype.slice.call(arguments);
var fn = args.shift();
if (actual !== expected) {
throw new Error('Invalid ctx expected ' + expected + ' but was ' + actual)
}
}
if(process.env.NODE_ENV !== "production"){
//in development, assert ctx is shaped right
var ctx = args[0];
if(!_.has(ctx, "rid")){
throw new Error("ctx must always have `rid`");
}
if(!_.has(ctx, "scope")){
throw new Error("ctx must always have `scope`");
}
if(_.has(ctx, "event") && !_.has(ctx, "raiseEvent")){//event durring select/eval event exp
assertCTX_keys(ctx, [
"event",
"pico_id",
"rule_name",
]);
}else if(_.has(ctx, "event")){//event durring rule body
assertCTX_keys(ctx, [
"event",
"pico_id",
"rule_name",
module.exports = function runKRL () {
var args = Array.prototype.slice.call(arguments)
var fn = args.shift()
"raiseEvent",
"raiseError",
"scheduleEvent",
"addActionResponse",
"stopRulesetExecution",
]);
}else if(_.has(ctx, "query")){
assertCTX_keys(ctx, [
"query",
"pico_id",
]);
}else{
assertCTX_keys(ctx, [
//no extra keys when registering a ruleset
//TODO use a pico_id when registering rulesets
]);
}
if (process.env.NODE_ENV !== 'production') {
// in development, assert ctx is shaped right
var ctx = args[0]
if (!_.has(ctx, 'rid')) {
throw new Error('ctx must always have `rid`')
}
if (!_.has(ctx, 'scope')) {
throw new Error('ctx must always have `scope`')
}
if (_.has(ctx, 'event') && !_.has(ctx, 'raiseEvent')) { // event durring select/eval event exp
assertCTXkeys(ctx, [
'event',
'pico_id',
'rule_name'
])
} else if (_.has(ctx, 'event')) { // event durring rule body
assertCTXkeys(ctx, [
'event',
'pico_id',
'rule_name',
if( ! fn.wrapped){
fn.wrapped = cocb.wrap(fn);
'raiseEvent',
'raiseError',
'scheduleEvent',
'addActionResponse',
'stopRulesetExecution'
])
} else if (_.has(ctx, 'query')) {
assertCTXkeys(ctx, [
'query',
'pico_id'
])
} else {
assertCTXkeys(ctx, [
// no extra keys when registering a ruleset
// TODO use a pico_id when registering rulesets
])
}
return fn.wrapped.apply(null, args);
};
}
return Promise.resolve(fn.apply(null, args))
}

@@ -1,119 +0,117 @@

var _ = require("lodash");
var lt = require("long-timeout");//makes it possible to have a timeout longer than 24.8 days (2^31-1 milliseconds)
var cuid = require("cuid");
var schedule = require("node-schedule");
var _ = require('lodash')
var lt = require('long-timeout')// makes it possible to have a timeout longer than 24.8 days (2^31-1 milliseconds)
var cuid = require('cuid')
var schedule = require('node-schedule')
module.exports = function(conf){
module.exports = function (conf) {
var currTimeout
var cronById = {}
var mostRecentUpdateId
var curr_timeout;
var cron_by_id = {};
var most_recent_update_id;
var clearCurrTimeout = function () {
if (currTimeout && !conf.is_test_mode) {
lt.clearTimeout(currTimeout)
}
currTimeout = null
}
var pendingAtRemoves = 0
var clearCurrTimeout = function(){
if(curr_timeout && !conf.is_test_mode){
lt.clearTimeout(curr_timeout);
}
curr_timeout = null;
};
var pending_at_removes = 0;
/**
/**
* call update everytime the schedule in the db changes
*/
var update = function update(){
if(pending_at_removes !== 0){
return;//remove will call update() when it's done
var update = function update () {
if (pendingAtRemoves !== 0) {
return// remove will call update() when it's done
}
var myUpdateId = cuid()
mostRecentUpdateId = myUpdateId
conf.db.nextScheduleEventAt(function (err, next) {
if (mostRecentUpdateId !== myUpdateId) {
// schedule is out of date
return
}
// always clear the timeout since we're about to re-schedule it
clearCurrTimeout()
if (err) return conf.onError(err)
if (!next) {
return// nothing to schedule
}
var onTime = function () {
clearCurrTimeout()// mostly for testing, but also to be certain
if (mostRecentUpdateId !== myUpdateId) {
// schedule is out of date
return
}
var my_update_id = cuid();
most_recent_update_id = my_update_id;
conf.db.nextScheduleEventAt(function(err, next){
if(most_recent_update_id !== my_update_id){
//schedule is out of date
return;
}
//always clear the timeout since we're about to re-schedule it
clearCurrTimeout();
if(err) return conf.onError(err);
if(!next){
return;//nothing to schedule
}
var onTime = function(){
clearCurrTimeout();//mostly for testing, but also to be certain
if(most_recent_update_id !== my_update_id){
//schedule is out of date
return;
}
//remove it, but let the scheduler know that it's pending
pending_at_removes++;
conf.db.removeScheduleEventAt(next.id, next.at, function(err){
pending_at_removes--;
if(err) conf.onError(err);
update();//check the schedule for the next
});
// remove it, but let the scheduler know that it's pending
pendingAtRemoves++
conf.db.removeScheduleEventAt(next.id, next.at, function (err) {
pendingAtRemoves--
if (err) conf.onError(err)
update()// check the schedule for the next
})
//emit the scheduled job
conf.onEvent(next.event);
};
// emit the scheduled job
conf.onEvent(next.event)
}
if(conf.is_test_mode){
//in test mode they manually trigger execution of curr_timeout
curr_timeout = onTime;
}else{
//Execute the event by milliseconds from now.
//If it's in the past it will happen on the next tick
curr_timeout = lt.setTimeout(onTime, next.at.getTime() - Date.now());
}
});
};
if (conf.is_test_mode) {
// in test mode they manually trigger execution of currTimeout
currTimeout = onTime
} else {
// Execute the event by milliseconds from now.
// If it's in the past it will happen on the next tick
currTimeout = lt.setTimeout(onTime, next.at.getTime() - Date.now())
}
})
}
var r = {
update: update,
addCron: function(timespec, id, event_orig){
//clone in case event_orig get's mutated
var event = _.cloneDeep(event_orig);
var r = {
update: update,
addCron: function (timespec, id, eventOrig) {
// clone in case eventOrig get's mutated
var event = _.cloneDeep(eventOrig)
if(_.has(cron_by_id, id)){
if(true
&& timespec === cron_by_id[id].timespec
&& _.isEqual(event, cron_by_id[id].event)
){
return;//nothing changed
}
cron_by_id[id].job.cancel();//kill this cron so we can start a new on
}
var handler = function(){
conf.onEvent(event);
};
cron_by_id[id] = {
timespec: timespec,
event: event,
job: conf.is_test_mode
? {handler: handler, cancel: _.noop}
: schedule.scheduleJob(timespec, handler)
};
},
rmCron: function(id){
if(!_.has(cron_by_id, id)){
return;
}
cron_by_id[id].job.cancel();
delete cron_by_id[id];
},
};
if(conf.is_test_mode){
r.test_mode_triggerTimeout = function(){
if(curr_timeout){
curr_timeout();
}
};
r.test_mode_triggerCron = function(id){
if(_.has(cron_by_id, id)){
cron_by_id[id].job.handler();
}
};
if (_.has(cronById, id)) {
if (true &&
timespec === cronById[id].timespec &&
_.isEqual(event, cronById[id].event)
) {
return// nothing changed
}
cronById[id].job.cancel()// kill this cron so we can start a new on
}
var handler = function () {
conf.onEvent(event)
}
cronById[id] = {
timespec: timespec,
event: event,
job: conf.is_test_mode
? {handler: handler, cancel: _.noop}
: schedule.scheduleJob(timespec, handler)
}
},
rmCron: function (id) {
if (!_.has(cronById, id)) {
return
}
cronById[id].job.cancel()
delete cronById[id]
}
return r;
};
}
if (conf.is_test_mode) {
r.test_mode_triggerTimeout = function () {
if (currTimeout) {
currTimeout()
}
}
r.test_mode_triggerCron = function (id) {
if (_.has(cronById, id)) {
cronById[id].job.handler()
}
}
}
return r
}

@@ -1,195 +0,192 @@

var _ = require("lodash");
var test = require("tape");
var Scheduler = require("./Scheduler");
var _ = require('lodash')
var test = require('tape')
var Scheduler = require('./Scheduler')
test("Scheduler - at", function(t){
test('Scheduler - at', function (t) {
var log = []
var queueNextEventAt = []
var queueRemoveEventAt = []
var log = [];
var queue_nextEventAt = [];
var queue_removeEventAt = [];
var popNextEventAt = function (id, ignoreIfEmpty) {
// pop off the oldest callback
var callback = queueNextEventAt.shift()
if (ignoreIfEmpty && !callback) {
return
}
if (!id) {
return callback()
}
callback(null, {
id: id,
at: new Date(), // doesn't matter
event: id// shape doesn't matter here
})
}
var popNextEventAt = function(id, ignore_if_empty){
//pop off the oldest callback
var callback = queue_nextEventAt.shift();
if(ignore_if_empty && !callback){
return;
}
if(!id){
return callback();
}
callback(null, {
id: id,
at: new Date(),//doesn't matter
event: id,//shape doesn't matter here
});
};
var popRemoveEventAt = function () {
// pop off the oldest callback
var callback = queueRemoveEventAt.shift()
callback()
}
var popRemoveEventAt = function(){
//pop off the oldest callback
var callback = queue_removeEventAt.shift();
callback();
};
var sch = Scheduler({
is_test_mode: true,
db: {
nextScheduleEventAt: function (callback) {
queueNextEventAt.push(callback)
},
removeScheduleEventAt: function (id, at, callback) {
queueRemoveEventAt.push(callback)
}
},
onError: function (err) {
log.push(['ERROR', err])
},
onEvent: function (event) {
log.push(['EVENT', event])
}
})
var sch = Scheduler({
is_test_mode: true,
db: {
nextScheduleEventAt: function(callback){
queue_nextEventAt.push(callback);
},
removeScheduleEventAt: function(id, at, callback){
queue_removeEventAt.push(callback);
},
},
onError: function(err){
log.push(["ERROR", err]);
},
onEvent: function(event){
log.push(["EVENT", event]);
},
});
sch.update()
sch.update()
popNextEventAt('1')
sch.test_mode_triggerTimeout()
popNextEventAt('1')
sch.test_mode_triggerTimeout()
popRemoveEventAt()
popNextEventAt(null)
sch.update();
sch.update();
popNextEventAt("1");
sch.test_mode_triggerTimeout();
popNextEventAt("1");
sch.test_mode_triggerTimeout();
popRemoveEventAt();
popNextEventAt(null);
t.deepEquals(log, [['EVENT', '1']], 'the event should only fire once!')
t.deepEquals(log, [["EVENT", "1"]], "the event should only fire once!");
log = []
log = [];
sch.update()
popNextEventAt('foo')
sch.test_mode_triggerTimeout()
// notice "foo" has not be removed from the db yet
sch.update()
popNextEventAt('foo', true)// "foo" is still in the db, so naturally it will apear here
sch.test_mode_triggerTimeout()
popRemoveEventAt()
popNextEventAt(null, true)
popNextEventAt(null, true)
sch.update();
popNextEventAt("foo");
sch.test_mode_triggerTimeout();
//notice "foo" has not be removed from the db yet
sch.update();
popNextEventAt("foo", true);//"foo" is still in the db, so naturally it will apear here
sch.test_mode_triggerTimeout();
popRemoveEventAt();
popNextEventAt(null, true);
popNextEventAt(null, true);
t.deepEquals(log, [['EVENT', 'foo']], 'the event should only fire once!')
t.deepEquals(log, [["EVENT", "foo"]], "the event should only fire once!");
t.equals(queueNextEventAt.length, 0, 'should be no outstanding nextEventAt callbacks')
t.equals(queueRemoveEventAt.length, 0, 'should be no outstanding removeEventAt callbacks')
t.end()
})
t.equals(queue_nextEventAt.length, 0, "should be no outstanding nextEventAt callbacks");
t.equals(queue_removeEventAt.length, 0, "should be no outstanding removeEventAt callbacks");
var nTicks = function (n, callback) {
if (n === 0) {
callback()
return
}
process.nextTick(function () {
nTicks(n - 1, callback)
})
}
t.end();
});
if(process.env.SKIP_LONG_TESTS === "true"){
//skip the generative test when running the tests quick i.e. `npm start`
return;
var randomTick = function (callback) {
// 0 means no tick i.e. synchronous
nTicks(_.random(0, 4), callback)
}
var nTicks = function(n, callback){
if(n === 0){
callback();
return;
}
process.nextTick(function(){
nTicks(n - 1, callback);
});
};
test('Scheduler - at - generative test', function (t) {
var nEvents = 50000
var randomTick = function(callback){
//0 means no tick i.e. synchronous
nTicks(_.random(0, 4), callback);
};
if (process.env.SKIP_LONG_TESTS === 'true') {
// shorten the generative test when running the tests quick i.e. `npm start`
nEvents = 5
}
test("Scheduler - at - generative test", function(t){
var log = []
var eventQueue = []
var n_events = 50000;
var sch = Scheduler({
is_test_mode: true,
db: {
nextScheduleEventAt: function (callback) {
randomTick(function () {
if (eventQueue.length === 0) {
// console.log("popNextEventAt(null)");
return callback()
}
// read the next event to run, then tick again
var id = eventQueue[0]
var next = {
id: id,
at: new Date(), // doesn't matter for this test
event: id// shape doesn't matter for this test
}
randomTick(function () {
// console.log("popNextEventAt(", id, ")");
callback(null, next)
nTicks(_.random(1, 4), function () {
// console.log("test_mode_triggerTimeout()");
sch.test_mode_triggerTimeout()
})
})
})
},
removeScheduleEventAt: function (id, at, callback) {
randomTick(function () {
_.pull(eventQueue, id)
randomTick(function () {
// console.log("popRemoveEventAt()", id);
callback()
if (id === nEvents) {
process.nextTick(function () {
onDone()
})
}
})
})
}
},
onError: function (err) {
// this test expects no errors to occur
t.end(err)
},
onEvent: function (event) {
log.push(event)
}
})
// console.log("update()");
sch.update()
var log = [];
var event_queue = [];
var eventI = 0
var sch = Scheduler({
is_test_mode: true,
db: {
nextScheduleEventAt: function(callback){
randomTick(function(){
if(event_queue.length === 0){
//console.log("popNextEventAt(null)");
return callback();
}
//read the next event to run, then tick again
var id = event_queue[0];
var next = {
id: id,
at: new Date(),//doesn't matter for this test
event: id,//shape doesn't matter for this test
};
randomTick(function(){
//console.log("popNextEventAt(", id, ")");
callback(null, next);
nTicks(_.random(1, 4), function(){
//console.log("test_mode_triggerTimeout()");
sch.test_mode_triggerTimeout();
});
});
});
},
removeScheduleEventAt: function(id, at, callback){
randomTick(function(){
_.pull(event_queue, id);
randomTick(function(){
//console.log("popRemoveEventAt()", id);
callback();
if(id === n_events){
process.nextTick(function(){
onDone();
});
}
});
});
},
},
onError: function(err){
//this test expects no errors to occur
t.end(err);
},
onEvent: function(event){
log.push(event);
},
});
//console.log("update()");
sch.update();
var tickLoop = function () {
if (eventI >= nEvents) {
return
}
randomTick(function () {
eventI++
eventQueue.push(eventI)
// console.log("update()");
sch.update()
tickLoop()
})
}
tickLoop()
var event_i = 0;
var tickLoop = function(){
if(event_i >= n_events){
return;
}
randomTick(function(){
event_i++;
event_queue.push(event_i);
//console.log("update()");
sch.update();
tickLoop();
});
};
tickLoop();
function onDone(){
var fail = false;
var i;
for(i = 0; i < log.length; i++){
if(log[i] !== (i + 1)){
fail = true;
break;
}
}
if(fail){
t.fail("events out of order! " + log.join(","));
}else{
t.ok(true, "events in order");
}
t.end();
function onDone () {
var fail = false
var i
for (i = 0; i < log.length; i++) {
if (log[i] !== (i + 1)) {
fail = true
break
}
}
});
if (fail) {
t.fail('events out of order! ' + log.join(','))
} else {
t.ok(true, 'events in order')
}
t.end()
}
})

@@ -1,155 +0,144 @@

var _ = require("lodash");
var cocb = require("co-callback");
var async = require("async");
var ktypes = require("krl-stdlib/types");
var runKRL = require("./runKRL");
var aggregateEvent = require("./aggregateEvent");
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
var runKRL = require('./runKRL')
var aggregateEvent = require('./aggregateEvent')
var getAttrString = function(ctx, attr){
return _.has(ctx, ["event", "attrs", attr])
? ktypes.toString(ctx.event.attrs[attr])
: "";
};
function getAttrString (ctx, attr) {
return _.has(ctx, ['event', 'attrs', attr])
? ktypes.toString(ctx.event.attrs[attr])
: ''
}
var evalExpr = cocb.wrap(function*(ctx, rule, aggregator, exp, setting){
var recur = function*(e){
return yield evalExpr(ctx, rule, aggregator, e, setting);
};
if(_.isArray(exp)){
if(exp[0] === "not"){
return !(yield recur(exp[1]));
}else if(exp[0] === "and"){
return (yield recur(exp[1])) && (yield recur(exp[2]));
}else if(exp[0] === "or"){
return (yield recur(exp[1])) || (yield recur(exp[2]));
}
async function evalExpr (ctx, rule, aggregator, exp, setting) {
var recur = function (e) {
return evalExpr(ctx, rule, aggregator, e, setting)
}
if (_.isArray(exp)) {
let r
if (exp[0] === 'not') {
r = !(await recur(exp[1]))
} else if (exp[0] === 'and') {
r = (await recur(exp[1])) && (await recur(exp[2]))
} else if (exp[0] === 'or') {
r = (await recur(exp[1])) || (await recur(exp[2]))
}
//only run the function if the domain and type match
var domain = ctx.event.domain;
var type = ctx.event.type;
if(_.get(rule, ["select", "graph", domain, type, exp]) !== true){
return false;
}
return yield runKRL(rule.select.eventexprs[exp], ctx, aggregator, getAttrString, setting);
});
return r
}
// only run the function if the domain and type match
var domain = ctx.event.domain
var type = ctx.event.type
if (_.get(rule, ['select', 'graph', domain, type, exp]) !== true) {
return false
}
return runKRL(rule.select.eventexprs[exp], ctx, aggregator, getAttrString, setting)
}
var getNextState = cocb.wrap(function*(ctx, rule, curr_state, aggregator, setting){
var stm = rule.select.state_machine[curr_state];
async function getNextState (ctx, rule, currState, aggregator, setting) {
var stm = rule.select.state_machine[currState]
var i;
for(i=0; i < stm.length; i++){
if(yield evalExpr(ctx, rule, aggregator, stm[i][0], setting)){
//found a match
return stm[i][1];
}
var i
for (i = 0; i < stm.length; i++) {
if (await evalExpr(ctx, rule, aggregator, stm[i][0], setting)) {
// found a match
return stm[i][1]
}
if(curr_state === "end"){
return "start";
}
return curr_state;//by default, stay on the current state
});
}
if (currState === 'end') {
return 'start'
}
return currState// by default, stay on the current state
}
var shouldRuleSelect = cocb.wrap(function*(core, ctx, rule){
async function shouldRuleSelect (core, ctx, rule) {
var smData = await core.db.getStateMachineYieldable(ctx.pico_id, rule)
var sm_data = yield core.db.getStateMachineYieldable(ctx.pico_id, rule);
var bindings = smData.bindings || {}
var bindings = sm_data.bindings || {};
if (_.isFunction(rule.select && rule.select.within)) {
if (!_.isNumber(smData.starttime)) {
smData.starttime = ctx.event.timestamp.getTime()
}
var timeSinceLast = ctx.event.timestamp.getTime() - smData.starttime
if(_.isFunction(rule.select && rule.select.within)){
// restore any stored variables in a temporary scope
var ctx2 = core.mkCTX(_.assign({}, ctx, {
scope: ctx.scope.push()
}))
_.each(bindings, function (val, id) {
ctx2.scope.set(id, val)
})
var timeLimit = await runKRL(rule.select.within, ctx2)
if(!_.isNumber(sm_data.starttime)){
sm_data.starttime = ctx.event.timestamp.getTime();
}
var time_since_last = ctx.event.timestamp.getTime() - sm_data.starttime;
// restore any stored variables in a temporary scope
var ctx2 = core.mkCTX(_.assign({}, ctx, {
scope: ctx.scope.push(),
}));
_.each(bindings, function(val, id){
ctx2.scope.set(id, val);
});
var time_limit = yield runKRL(rule.select.within, ctx2);
if(time_since_last > time_limit){
// time has expired, reset the state machine
sm_data.state = "start";
}
if(sm_data.state === "start"){
// set or reset the clock
sm_data.starttime = ctx.event.timestamp.getTime();
bindings = {};
}
if (timeSinceLast > timeLimit) {
// time has expired, reset the state machine
smData.state = 'start'
}
if (smData.state === 'start') {
// set or reset the clock
smData.starttime = ctx.event.timestamp.getTime()
bindings = {}
}
}
// restore any variables that were stored
_.each(bindings, function(val, id){
ctx.scope.set(id, val);
});
// restore any variables that were stored
_.each(bindings, function (val, id) {
ctx.scope.set(id, val)
})
var aggregator = aggregateEvent(core, sm_data.state, rule);
var aggregator = aggregateEvent(core, smData.state, rule)
var setting = function(id, val){
ctx.scope.set(id, val);
bindings[id] = val;
};
var setting = function (id, val) {
ctx.scope.set(id, val)
bindings[id] = val
}
var next_state = yield getNextState(ctx, rule, sm_data.state, aggregator, setting);
var nextState = await getNextState(ctx, rule, smData.state, aggregator, setting)
yield core.db.putStateMachineYieldable(ctx.pico_id, rule, {
state: next_state,
starttime: sm_data.starttime,
bindings: next_state === "end"
? {}
: bindings,
});
await core.db.putStateMachineYieldable(ctx.pico_id, rule, {
state: nextState,
starttime: smData.starttime,
bindings: nextState === 'end'
? {}
: bindings
})
return next_state === "end";
});
return nextState === 'end'
}
var selectForPico = function(core, ctx, pico_rids, callback){
module.exports = async function selectRulesToEval (core, ctx) {
// read this fresh everytime we select, b/c it might have changed during event processing
var picoRids = await core.db.ridsOnPicoYieldable(ctx.pico_id)
var rules_to_select = core.rsreg.salientRules(ctx.event.domain, ctx.event.type, function(rid){
if(pico_rids[rid] !== true){
return false;
}
if(_.has(ctx.event, "for_rid") && _.isString(ctx.event.for_rid)){
if(rid !== ctx.event.for_rid){
return false;
}
}
return true;
});
var rulesToSelect = core.rsreg.salientRules(ctx.event.domain, ctx.event.type, function (rid) {
if (picoRids[rid] !== true) {
return false
}
if (_.has(ctx.event, 'for_rid') && _.isString(ctx.event.for_rid)) {
if (rid !== ctx.event.for_rid) {
return false
}
}
return true
})
async.filter(rules_to_select, function(rule, next){
var ruleCTX = core.mkCTX({
rid: rule.rid,
scope: rule.scope,
event: ctx.event,
pico_id: ctx.pico_id,
rule_name: rule.name,
});
cocb.run(shouldRuleSelect(core, ruleCTX, rule), next);
}, function(err, rules){
if(err){
process.nextTick(function(){
//wrapping in nextTick resolves strange issues with UnhandledPromiseRejectionWarning
//when infact we are handling the rejection
callback(err);
});
return;
}
//rules in the same ruleset must fire in order
callback(void 0, _.reduce(_.groupBy(rules, "rid"), function(acc, rules){
return acc.concat(rules);
}, []));
});
};
var rules = await Promise.all(rulesToSelect.map(function (rule) {
var ruleCTX = core.mkCTX({
rid: rule.rid,
scope: rule.scope,
event: ctx.event,
pico_id: ctx.pico_id,
rule_name: rule.name
})
return shouldRuleSelect(core, ruleCTX, rule)
.then(function (shouldSelect) {
return shouldSelect ? rule : null
})
}))
rules = _.compact(rules)
module.exports = function(core, ctx, callback){
//read this fresh everytime we select, b/c it might have changed during event processing
core.db.ridsOnPico(ctx.pico_id, function(err, pico_rids){
if(err) return callback(err);
selectForPico(core, ctx, pico_rids, callback);
});
};
// rules in the same ruleset must fire in order
rules = _.reduce(_.groupBy(rules, 'rid'), function (acc, rules) {
return acc.concat(rules)
}, [])
return rules
}

@@ -1,10 +0,10 @@

module.exports = function(t, module){
return function*(fn_name, ctx, args, error, message){
try{
yield module[fn_name](ctx, args);
t.fail("Failed to throw an error");
}catch(err){
t.equals(err + "", error, message);
}
};
};
module.exports = function (t, module) {
return async function (fnName, ctx, args, error, message) {
try {
await module[fnName](ctx, args)
t.fail('Failed to throw an error')
} catch (err) {
t.equals(err + '', error, message)
}
}
}

@@ -1,19 +0,19 @@

require("./extractRulesetID.test");
require("./DB.test");
require("./PicoQueue.test");
require("./modules/http.test");
require("./modules/time.test");
require("./modules/engine.test");
require("./modules/event.test");
require("./modules/math.test");
require("./modules/random.test");
require("./modules/schedule.test");
require("./cleanEvent.test");
require("./cleanQuery.test");
require("./runAction.test");
require("./Scheduler.test");
require("./migrations/20171031T182007_pvar_index.test.js");
require("./ChannelPolicy.test");
require('./extractRulesetID.test')
require('./DB.test')
require('./PicoQueue.test')
require('./modules/http.test')
require('./modules/time.test')
require('./modules/engine.test')
require('./modules/event.test')
require('./modules/math.test')
require('./modules/random.test')
require('./modules/schedule.test')
require('./cleanEvent.test')
require('./cleanQuery.test')
require('./runAction.test')
require('./Scheduler.test')
require('./migrations/20171031T182007_pvar_index.test.js')
require('./ChannelPolicy.test')
//run system tests last
require("./index.test");
// run system tests last
require('./index.test')

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc