Comparing version 0.0.7 to 0.1.0
var Error = require('./error'); | ||
var clone = require('node-v8-clone').clone; | ||
var validator = require('./validator'); | ||
var drivers = {}; | ||
@@ -7,3 +8,2 @@ | ||
var bad = []; | ||
function get(obj) { | ||
@@ -26,2 +26,3 @@ var keys = Object.keys(obj); | ||
allowedOperators: ["$in", "$exists"], | ||
init: function(driver, params, callback) { | ||
@@ -43,12 +44,16 @@ driver = driver.toLowerCase(); | ||
}, | ||
validateSchema: function() { | ||
}, | ||
instance: function() { | ||
var self = this; | ||
if (Object.keys(drivers).length === 0) { | ||
throw "No drivers have been inited yet! Please call BorgDB.init first!"; | ||
} | ||
this.id = Math.random().toString(36).substring(7); | ||
this.schemas = {}; | ||
this.hooks = {}; | ||
self.id = Math.random().toString(36).substring(7); | ||
self.schemas = {}; | ||
self.hooks = {}; | ||
return self; | ||
} | ||
@@ -58,3 +63,2 @@ }; | ||
BorgDB.instance.prototype = { | ||
//assimilate all data :) | ||
@@ -66,6 +70,18 @@ BorgCube: function(collection_name, array) { | ||
value: function(callback) { | ||
var saveValue = clone(this, true); | ||
if (Object.keys(drivers).length > 0) driver = drivers[Object.keys(drivers)[0]]; | ||
if (this.__instance.schemas[collection_name]) { | ||
//check to make sure the insert matches the schema | ||
//replace referenced documents with their ids so joined data is not re-saved into this collection | ||
for (var key in this) { | ||
if (key !== 'id' && this.__instance.schemas[collection_name].properties[key] && this.__instance.schemas[collection_name].properties[key].ref) { | ||
if(this[key] instanceof Array) { | ||
saveValue[key] = []; | ||
for (var k in this[key]) { | ||
saveValue[key].push(this[key][k].id || this[key][k]); | ||
} | ||
} else { | ||
saveValue[key] = this[key].id || this[key]; | ||
} | ||
} | ||
} | ||
@@ -75,3 +91,9 @@ | ||
return driver.save(fixed_collection_name || this.__collection, this, callback); | ||
// Validate properties and data types | ||
var invalid = this.__instance.validate(saveValue, this.__instance.schemas[collection_name], null); | ||
if (!invalid.valid) { | ||
return callback(new Error(invalid.errors)); | ||
} | ||
return driver.save(fixed_collection_name || this.__collection, saveValue, callback); | ||
} | ||
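To make the intent of the flattening loop above concrete, here is a small illustrative sketch (the User/Sexes shape mirrors the test fixtures; the ids are made up):

// Illustrative input: a User whose `sex` property references a Sexes document
var user = { id: 'u1', firstname: 'Ada', sex: { id: 's1', name: 'female' } };
// After the loop above runs, the cloned saveValue keeps only the referenced id:
// { id: 'u1', firstname: 'Ada', sex: 's1' }
// Arrays of referenced documents are flattened element by element the same way,
// so joined data is never written back into the parent collection.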
@@ -90,3 +112,3 @@ }); | ||
if (key !== 'id' && !(array[i][key] instanceof Object) && this.schemas[collection_name].properties[key].ref) { | ||
if (this.schemas[collection_name].properties[key].type === 'array') { | ||
if (this.schemas[collection_name].properties[key].datatype === 'array') { | ||
@@ -122,2 +144,6 @@ } else { | ||
validate: function (object, schema, options) { | ||
return validator.validate(object, schema, options); | ||
}, | ||
freeze: function() { | ||
@@ -134,2 +160,3 @@ Object.freeze(this.schemas); // freeze the this.schemas object. | ||
}, | ||
hook: function(type, action) { | ||
@@ -145,2 +172,3 @@ var hook_types = ['collection_name']; | ||
}, | ||
find: function(query, collection_data, options, callback) { | ||
@@ -151,4 +179,2 @@ if (typeof options === 'function') { | ||
} | ||
// console.log(this.id); | ||
// console.log(this.schemas); | ||
@@ -160,18 +186,24 @@ var instance = this; | ||
// Check specialKey operators (i.e. $exists, $in) | ||
if (getBadSpecialKeys(query, BorgDB.allowedOperators).length > 0) { | ||
callback(new Error('The following operators/keys are not allowed in your DB queries: ' + getBadSpecialKeys(query, BorgDB.allowedOperators))); | ||
return callback(new Error('The following operators/keys are not allowed in your DB queries: ' + getBadSpecialKeys(query, BorgDB.allowedOperators))); | ||
} | ||
// Get Collection Name | ||
// or Throw an error if the requested collection is not found | ||
var collection_name = collection_data.substring(0, collection_data.indexOf("{") > 0 ? collection_data.indexOf("{") : collection_data.length); | ||
var property_group = collection_data.substring(collection_data.indexOf("{") + 1, collection_data.indexOf("}")); | ||
if (!this.schemas[collection_name]) return callback(new Error('The requested collection of ' + collection_name + ' is invalid or has not yet been registered')); | ||
// Get PropertyGroup if exists | ||
// or Throw an error if the requested PropertyGroup is not found | ||
var property_group = collection_data.substring(collection_data.indexOf("{") + 1, collection_data.indexOf("}")); | ||
if (property_group && !this.schemas[collection_name].propertyGroups[property_group]) { | ||
return callback(new Error('Invalid property group of ' + property_group + ' on the collection: ' + collection_name)); | ||
} else if (property_group && this.schemas[collection_name].propertyGroups[property_group]) { | ||
options.fields = this.schemas[collection_name].propertyGroups[property_group]; | ||
} | ||
// Get the Schema | ||
if (this.schemas[collection_name]) { | ||
options.join = []; | ||
for (var prop in this.schemas[collection_name].properties) { | ||
@@ -187,7 +219,15 @@ if (this.schemas[collection_name].properties[prop].ref !== undefined) { | ||
} | ||
} else { | ||
return callback(new Error('Schema could not be found for the collection: ' + collection_name)); | ||
} | ||
// Hook the collection name | ||
if (this.hooks['collection_name']) var fixed_collection_name = this.hooks['collection_name'](collection_name); | ||
// Validate properties and data types | ||
var invalid = this.validate(query, this.schemas[collection_name], null); | ||
if (!invalid.valid) { | ||
return callback(new Error(invalid.errors)); | ||
} | ||
return driver.find(query, fixed_collection_name || collection_name, options, function(err, docs) { | ||
@@ -199,14 +239,23 @@ if (err) callback(err); | ||
}, | ||
create: function(collection_name, data, callback) { | ||
callback = callback || options; | ||
if (Object.keys(drivers).length > 0) driver = drivers[Object.keys(drivers)[0]]; | ||
if (this.schemas[collection_name]) { | ||
//check to make sure the insert matches the schema | ||
create: function(collection_name, data, options, callback) { | ||
if (typeof options === 'function') { | ||
callback = options; | ||
options = {}; | ||
} | ||
if (Object.keys(drivers).length > 0) driver = drivers[Object.keys(drivers)[0]]; | ||
if (this.hooks['collection_name']) var fixed_collection_name = this.hooks['collection_name'](collection_name); | ||
// Validate properties and data types | ||
var invalid = this.validate(data, this.schemas[collection_name], null); | ||
if (!invalid.valid) { | ||
return callback(new Error(invalid.errors)); | ||
} | ||
return driver.create(fixed_collection_name || collection_name, data, callback); | ||
}, | ||
delete: function(collection_name, query, callback) { | ||
@@ -213,0 +262,0 @@ callback = callback || options; |
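For orientation, a minimal end-to-end sketch of the 0.1.0 API as exercised by the updated tests: BorgDB.init now takes an already-open mongoskin connection rather than a { url: ... } object, and create/save run the registered schema through the validator. The collection and field values below are illustrative.

var BorgDB = require('borgdb');
var mongoskin = require('mongoskin');

// 0.1.0: hand BorgDB an existing connection instead of { url: ... }
var conn = mongoskin.db('mongodb://localhost/test', { safe: true });

BorgDB.init('mongodb', conn, function (err) {
  if (err) throw err;
  var db = new BorgDB.instance();

  // Schemas use `datatype` (not `type`) and optional propertyGroups
  db.regester('User', {
    properties: {
      firstname: { datatype: 'string', required: true },
      email: { datatype: 'string', required: true }
    },
    propertyGroups: { test: ['firstname', 'email'] }
  });

  db.create('User', { firstname: 'Ada', email: 'ada@example.com' }, function (err, doc) {
    if (err) return console.error(err);
    console.log('created', doc.id); // the driver maps Mongo's _id back to id
  });
});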
var mongo = require('mongoskin'), | ||
Join = require('mongo-join').Join, | ||
objectDiff = require('objectdiff'), | ||
Mongify = require('mongify'), | ||
Error = require('../error'), | ||
ObjectID = mongo.ObjectID, | ||
DB = undefined; | ||
ObjectID = mongo.ObjectID; | ||
@@ -18,29 +18,24 @@ function mongoKey(key) { | ||
var Driver = function() { | ||
if (DB) { | ||
var Driver = function(conn) { | ||
var self = this; | ||
if (self.DB) { | ||
throw new Error('The mongodb driver has already been initialized'); | ||
} | ||
DB = this.connect.call(this, arguments[0]); | ||
//DB = mongo; | ||
self.DB = conn; | ||
if (!self.tables) { | ||
self.tables = []; | ||
} | ||
return self; | ||
}; | ||
Driver.prototype = { | ||
connect: function(params) { | ||
var conn = mongo.db(params.url, { | ||
safe: true | ||
}); | ||
if (!DB) { | ||
return conn; | ||
} | ||
DB = conn; | ||
return conn; | ||
}, | ||
create: function(bucket, data, callback) { | ||
var self = this; | ||
delete data._id; | ||
delete data.id; | ||
var collection = self.DB.collection(bucket); | ||
data._id = mongoKey(data._id); | ||
DB.collection(bucket).insert(data, function(err, docs) { | ||
collection.insert(data, function(err, docs) { | ||
if (err) return callback(err); | ||
@@ -54,5 +49,30 @@ | ||
}, | ||
find: function(spec, bucket, options, callback) { | ||
find: function(bucket, query, options, callback) { | ||
var fields; | ||
var joins; | ||
var self = this; | ||
var collection = self.DB.collection(bucket); | ||
if (query.id) { | ||
query._id = mongoKey(query.id); | ||
delete query.id; | ||
} | ||
if (typeof query._id === 'string') { | ||
query._id = mongoKey(query._id); | ||
} | ||
if (options instanceof Function && !callback) { | ||
callback = options; | ||
options = {}; | ||
} | ||
if (options.fields) { | ||
fields = {}; | ||
for (var i = 0; i < options.fields.length; i++) { | ||
fields[options.fields[i]] = 1; | ||
} | ||
delete options.fields; | ||
} | ||
if (options.join) { | ||
@@ -63,49 +83,50 @@ joins = options.join; | ||
if (spec.id) { | ||
spec._id = mongoKey(spec.id); | ||
delete spec.id; | ||
} | ||
if (typeof spec._id === 'string') { | ||
spec._id = mongoKey(spec._id); | ||
} | ||
var _processResults = function(err, cursor) { | ||
if (options.limit && options.limit === 1) { | ||
DB.collection(bucket).findOne(spec, function(err, doc) { | ||
doc.id = doc._id | ||
delete doc._id; | ||
callback(err, doc); | ||
}); | ||
} else { | ||
DB.collection(bucket).find(spec, function(err, cursor) { | ||
if (joins && joins.length > 0) { | ||
if (err) return callback(err); | ||
var join = new Join(DB._dbconn); | ||
for (var i = 0; i < joins.length; i++) { | ||
join.on({ | ||
field: joins[i].field, | ||
to: '_id', | ||
from: joins[i].collection | ||
}); | ||
} | ||
if (cursor && cursor._id) { | ||
cursor.id = cursor._id | ||
delete cursor._id; | ||
return callback(err, cursor); | ||
} | ||
join.toArray(cursor, function(err, docs) { | ||
for (var i = 0; i < docs.length; i++) { | ||
docs[i].id = docs[i]._id | ||
delete docs[i]._id; | ||
} | ||
callback(err, docs); | ||
if (joins && joins.length > 0) { | ||
var join = new Join(self.DB._dbconn); | ||
for (var i = 0; i < joins.length; i++) { | ||
join.on({ | ||
field: joins[i].field, | ||
to: '_id', | ||
from: joins[i].collection | ||
}); | ||
} else { | ||
cursor.toArray(function(err, docs) { | ||
for (var i = 0; i < docs.length; i++) { | ||
docs[i].id = docs[i]._id | ||
delete docs[i]._id; | ||
} | ||
callback(err, docs); | ||
}); | ||
} | ||
}); | ||
join.toArray(cursor, function(err, docs) { | ||
for (var i = 0; i < docs.length; i++) { | ||
docs[i].id = docs[i]._id | ||
delete docs[i]._id; | ||
} | ||
callback(err, docs); | ||
}); | ||
} else { | ||
cursor.toArray(function(err, docs) { | ||
if (!docs) return callback(err); | ||
for (var i = 0; i < docs.length; i++) { | ||
docs[i].id = docs[i]._id | ||
delete docs[i]._id; | ||
} | ||
callback(err, docs); | ||
}); | ||
} | ||
}; | ||
if (typeof query._id === 'object') { | ||
return collection.findById(query._id, fields, options, _processResults) | ||
} | ||
return collection.find(query, fields, options, _processResults); | ||
}, | ||
delete: function(bucket, query, callback) { | ||
var self = this; | ||
if (query.id) { | ||
@@ -119,3 +140,3 @@ query._id = mongoKey(query.id); | ||
DB.collection(bucket).remove(query, function(err, rs) { | ||
return self.DB.collection(bucket).remove(query, function(err, rs) { | ||
if (err) return callback(err); | ||
@@ -127,3 +148,3 @@ | ||
save: function(bucket, document, callback) { | ||
delete document._id; | ||
var self = this; | ||
if (document.id) { | ||
@@ -134,3 +155,3 @@ document._id = mongoKey(document.id); | ||
DB.collection(bucket).save(document, function(err, rs) { | ||
return self.DB.collection(bucket).save(document, function(err, rs) { | ||
if (err) return callback(err); | ||
@@ -140,2 +161,5 @@ | ||
}); | ||
}, | ||
read: function (bucket, id, callback) { | ||
return this.find(bucket, {'_id': id}, callback); | ||
} | ||
@@ -142,0 +166,0 @@ }; |
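A rough usage sketch of the reworked mongodb driver, which now receives an existing mongoskin connection (stored on driver.DB) instead of connecting itself; the require path and bucket name are assumptions for illustration.

var mongoskin = require('mongoskin');
var MongoDriver = require('./lib/drivers/mongodb'); // illustrative path

var conn = mongoskin.db('mongodb://localhost/test', { safe: true });
var driver = new MongoDriver(conn); // no internal connect() call any more

// find(bucket, query, options, callback): `id` is mapped to Mongo's `_id`
// on the way in, and `_id` is mapped back to `id` on the way out.
driver.find('users', { firstname: 'Ada' }, {}, function (err, docs) {
  if (err) return console.error(err);
  console.log(docs.length, docs[0] && docs[0].id);
});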
@@ -1,129 +0,112 @@ | ||
var r = require('rethinkdb'); | ||
var objectDiff = require('objectdiff'); | ||
var util = require('util'); | ||
var RethinkDB = require('rethinkdb'); | ||
var Error = require('../error'); | ||
var Error = require('../util/error'); | ||
var Driver = function(conn) { | ||
var self = this; | ||
if (self.DB) { | ||
throw new Error('The rethinkdb driver has already been initialized'); | ||
} | ||
self.DB = conn; | ||
if (!self.tables) { | ||
self.tables = []; | ||
} | ||
return self; | ||
}; | ||
var Driver = function OsmosRethinkDriver(connection, options) { | ||
this.connection = connection; | ||
this.options = options || {}; | ||
} | ||
Driver.prototype = { | ||
get : function get(bucket, key, callback) { | ||
r.table(bucket, this.options).get(key).run(this.connection, callback); | ||
}, | ||
create : function create(bucket, document, data, callback) { | ||
r | ||
.table(bucket, this.options) | ||
.insert(data) | ||
.run(this.connection, function(err, result) { | ||
if (err) return callback(err); | ||
if (result.errors) return callback(new Error('Database insertion error: ' + JSON.stringify(result.first_error))); | ||
if (result.generated_keys) { | ||
document.primaryKey = result.generated_keys[0]; | ||
} | ||
callback(null); | ||
}); | ||
}, | ||
put : function put(bucket, document, data, callback) { | ||
if (!document.model.schema.primaryKey || !document.primaryKey) { | ||
throw new Error('You cannot put a document without a primary key'); | ||
} | ||
if (document.__originalData__[document.model.schema.primaryKey] == undefined) { | ||
return this.create(bucket, document, data, callback); | ||
} | ||
var changes = objectDiff.diff(document.__originalData__, data); | ||
if (changes.changed === 'equal') return callback(null); // Nothing to update. | ||
var diff = {}; | ||
Object.keys(changes.value).forEach(function(key) { | ||
if (changes.value[key].changed !== 'equal') { | ||
diff[key] = data[key]; | ||
} | ||
}); | ||
var primaryKey; | ||
if (diff[document.schema.primaryKey]) { | ||
primaryKey = document.__originalData__[document.schema.primaryKey]; | ||
create: function (bucket, data, callback) { | ||
var self = this; | ||
var _done = false; | ||
var _error; | ||
var _result; | ||
var _callback = function(error, result) { | ||
_done = true; | ||
_error = error; | ||
_result = result; | ||
callback(error, result); | ||
}; | ||
var insert = function insert(error, result) { | ||
self.tables.push(bucket); | ||
RethinkDB.table(bucket) | ||
.insert(data) | ||
.run(self.DB, _callback); | ||
}; | ||
if (self.tables.indexOf(bucket) != -1) { | ||
insert(); | ||
} else { | ||
primaryKey = document.primaryKey; | ||
RethinkDB.tableCreate(bucket).sync().run(self.DB, insert); | ||
} | ||
r | ||
.table(bucket, this.options) | ||
.get(primaryKey) | ||
.update(diff) | ||
.run(this.connection, function(err, result) { | ||
if (err) return callback(err); | ||
if (result.errors) return callback(new Error('Database update error: ' + JSON.stringify(result.first_error))); | ||
if (diff[document.schema.primaryKey]) { | ||
document.primaryKey = document.schema.primaryKey; // In case the primary key has changed. | ||
} | ||
callback(null); | ||
}); | ||
return function tableCreated (callback) { | ||
if (_done) { | ||
callback(_error, _result); | ||
} else { | ||
_callback = callback; | ||
} | ||
}; | ||
}, | ||
del : function deleteRecord(bucket, key, callback) { | ||
if (key.constructor.name === 'Object') { | ||
key = key[Object.keys(key)[0]]; | ||
read: function(bucket, query, callback) { | ||
var self = this; | ||
var options; | ||
var joins; | ||
if (query instanceof Array) { | ||
options = query[1] || {}; | ||
query = query[0]; | ||
if (options.join) { | ||
joins = options.join; | ||
delete options.join; | ||
} | ||
} | ||
r | ||
.table(bucket, this.options) | ||
.get(key) | ||
.delete() | ||
.run(this.connection, function(err, result) { | ||
if (err) return callback(err); | ||
if (result.errors) return callback(new Error('Database deletion error: ' + JSON.stringify(result.first_error))); | ||
callback(null); | ||
}); | ||
return RethinkDB.table(bucket).get(query).run(self.DB, callback); | ||
}, | ||
findOne : function findOne(bucket, spec, callback) { | ||
var table = r.table(bucket, this.options); | ||
if (typeof spec === 'function') { | ||
spec(this.connection, table, callback); | ||
} else { | ||
table.getAll(spec.search, { index : spec.index }).limit(1).run(this.connection, function(err, cursor) { | ||
if (err) return callback(err); | ||
cursor.toArray(function(err, docs) { | ||
callback(err, docs.length ? docs[0] : null); | ||
}); | ||
}); | ||
find: function(bucket, query, callback) { | ||
var self = this; | ||
var options; | ||
var joins; | ||
if (query instanceof Array) { | ||
options = query[1] || {}; | ||
query = query[0]; | ||
if (options.join) { | ||
joins = options.join; | ||
delete options.join; | ||
} | ||
} | ||
}, | ||
find : function find(bucket, spec, callback) { | ||
var table = r.table(bucket, this.options); | ||
if (typeof spec === 'function') { | ||
spec(this.connection, table, callback); | ||
} else { | ||
table.getAll(spec.search, { index : spec.index }).run(this.connection, function(err, cursor) { | ||
if (err) return callback(err); | ||
cursor.toArray(function(err, docs) { | ||
callback(err, docs); | ||
}); | ||
}); | ||
var _callback = function (err, cursor) { | ||
if (err) return callback(err); | ||
cursor.toArray(callback); | ||
}; | ||
var $query = RethinkDB.table(bucket).filter(query); | ||
// NOTE: `operators` was referenced below without ever being defined (a ReferenceError); | ||
// default it to an empty object so the debug loop is a safe no-op until operator | ||
// extraction is actually implemented. | ||
var operators = {}; | ||
for (var o in operators) { | ||
var ov = operators[o]; | ||
switch (o) { | ||
case "or": | ||
for (var k in ov) { | ||
console.log(typeof ov[k], ov[k]); | ||
} | ||
console.log(o, operators[o], query); | ||
break; | ||
} | ||
} | ||
return $query.run(self.DB, _callback); | ||
}, | ||
delete: function(bucket, query, callback) { | ||
}, | ||
save: function(bucket, document, callback) { | ||
} | ||
}; | ||
module.exports = Driver; |
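Likewise, a hedged sketch of the new rethinkdb driver, which also wraps a pre-established connection and lazily creates tables on first insert; the require path and table name are illustrative.

var r = require('rethinkdb');
var RethinkDriver = require('./lib/drivers/rethinkdb'); // illustrative path

r.connect({ host: 'localhost', port: 28015, db: 'test' }, function (err, conn) {
  if (err) throw err;
  var driver = new RethinkDriver(conn);

  // create() runs tableCreate() the first time a bucket is seen, then inserts
  driver.create('users', { firstname: 'Ada' }, function (err, result) {
    if (err) return console.error(err);
    // read() is a plain .get() by primary key
    driver.read('users', result.generated_keys[0], function (err, doc) {
      console.log(err, doc);
    });
  });
});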
(function(exports) { | ||
exports.mixin = mixin; | ||
exports.validate = validate; | ||
exports.validateAction = validateAction; | ||
exports.validateBlueprint = validateBlueprint; | ||
exports.validateQuery = validateQuery; | ||
exports.mixin = mixin; | ||
// | ||
// ### function validate (object, schema, options) | ||
// #### {Object} object the object to validate. | ||
// #### {Object} schema (optional) the JSON Schema to validate against. | ||
// #### {Object} options (optional) options controlling the validation | ||
// process. | ||
// Validate <code>object</code> against a JSON Schema. | ||
// If <code>object</code> is self-describing (i.e. has a | ||
// <code>$schema</code> property), it will also be validated | ||
// against the referenced schema. [TODO]: This behaviour may be | ||
// suppressed by setting the {@link #validate.options.???} | ||
// option to <code>???</code>.[/TODO] | ||
// | ||
// If <code>schema</code> is not specified, and <code>object</code> | ||
// is not self-describing, validation always passes. | ||
// | ||
// <strong>Note:</strong> in order to pass options but no schema, | ||
// <code>schema</code> <em>must</em> be specified in the call to | ||
// <code>validate()</code>; otherwise, <code>options</code> will | ||
// be interpreted as the schema. <code>schema</code> may be passed | ||
// as <code>null</code>, <code>undefined</code>, or the empty object | ||
// (<code>{}</code>) in this case. | ||
// | ||
function validate(object, schema, options) { | ||
options = mixin({}, options); | ||
var errors = []; | ||
validateObject(object, schema, options, errors); | ||
// | ||
// TODO: self-described validation | ||
// if (! options.selfDescribing) { ... } | ||
// | ||
return { | ||
valid: !(errors.length), | ||
errors: errors | ||
}; | ||
} | ||
function validateQuery(query, schema, options) { | ||
options = mixin({}, options); | ||
var errors = []; | ||
return { | ||
valid: !(errors.length), | ||
errors: errors | ||
}; | ||
} | ||
function validateBlueprint(schema) { | ||
var errors = []; | ||
if (!schema.collections) errors.push({ | ||
message: "Schema MUST contain collections" | ||
}); | ||
for (var key in schema.collections) { | ||
if (/[^a-zA-Z0-9]/.test(key)) errors.push({ | ||
message: "Schema collection names must be alphanumaric, " + key + " is not" | ||
}); | ||
if (key.length > 20 || key.length < 2) errors.push({ | ||
message: "Schema collection names must be more then 2 characters but less then 20, " + key + " is " + key.length | ||
}); | ||
for (var prop in schema.collections[key].properties) { | ||
var property = schema.collections[key].properties[prop]; | ||
if (/[^a-zA-Z0-9]/.test(prop)) errors.push({ | ||
message: "Collection field names must be alphanumaric, " + prop + " is not" | ||
}); | ||
if (prop.length > 12 || prop.length < 2) errors.push({ | ||
message: "Collection field names must be more then 2 characters but less then 12, " + prop + " is " + prop.length | ||
}); | ||
if (!property.type) errors.push({ | ||
message: "Collection fields must have a type " + prop + " does not" | ||
}); | ||
} | ||
} | ||
for (var key in schema.api) { | ||
if (/[^a-zA-Z0-9\/{}]/.test(key)) errors.push({ | ||
message: "Schema API enpoints names must be alphanumaric (except for variables), and start with a slash, " + key + " does not" | ||
}); | ||
if (Object.keys(schema.api[key]).length === 0) errors.push({ | ||
message: "Schema API enpoints must have at least one method " + key + " does not" | ||
}); | ||
validateObject(schema.api[key], { | ||
type: 'object', | ||
patternProperties: { | ||
'^(get|post|put|delete)': { | ||
type: 'object', | ||
properties: { | ||
security: { | ||
enum: ["none", "api-key", "oauth"] | ||
}, | ||
customCode: { | ||
type: "boolean" | ||
}, | ||
collection: { | ||
type: "string", | ||
conform: function(v) { | ||
if (schema.collections[v]) { | ||
return true; | ||
} | ||
return false; | ||
} | ||
}, | ||
queryType: { | ||
enum: ["find", "insert", "delete", "save"] | ||
}, | ||
propertyGroup: { | ||
type: "string", | ||
conform: function(v, object) { | ||
if (object.collection && schema.collections[object.collection] && schema.collections[object.collection].propertyGroups && schema.collections[object.collection].propertyGroups[v]) { | ||
return true; | ||
} | ||
return false; | ||
} | ||
}, | ||
requiredVariables: { | ||
type: "object", | ||
patternProperties: { | ||
'[a-zA-Z0-9]': { | ||
enum: ["string", "array", "number", "boolean", "null"] | ||
} | ||
} | ||
}, | ||
query: { | ||
type: "mongo-query", | ||
} | ||
} | ||
} | ||
} | ||
}, {sudo: true}, errors); | ||
} | ||
errors.push({ | ||
message: "We currently dont check propertyGroups in the DB Schema" | ||
}); | ||
return { | ||
valid: !(errors.length), | ||
errors: errors | ||
}; | ||
}; | ||
/** | ||
* Default messages to include with validation errors. | ||
*/ | ||
validate.messages = { | ||
@@ -179,5 +27,2 @@ required: "is required", | ||
/** | ||
* | ||
*/ | ||
validate.formats = { | ||
@@ -214,3 +59,3 @@ 'email': /^((([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+(\.([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+)*)|((\x22)((((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(([\x01-\x08\x0b\x0c\x0e-\x1f\x7f]|\x21|[\x23-\x5b]|[\x5d-\x7e]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(\\([\x01-\x09\x0b\x0c\x0d-\x7f]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))))*(((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(\x22)))@((([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.?$/i, | ||
test: function(value) { | ||
return true; | ||
return typeof(value) === 'object'; | ||
} | ||
@@ -220,28 +65,52 @@ } | ||
function mixin(obj) { | ||
var sources = Array.prototype.slice.call(arguments, 1); | ||
while (sources.length) { | ||
var source = sources.shift(); | ||
if (!source) { | ||
continue; | ||
} | ||
validate.types = require('./types').types; | ||
if (typeof(source) !== 'object') { | ||
throw new TypeError('mixin non-object'); | ||
} | ||
/** | ||
* Validation Functions | ||
*/ | ||
for (var p in source) { | ||
if (source.hasOwnProperty(p)) { | ||
obj[p] = source[p]; | ||
} | ||
} | ||
} | ||
// | ||
// ### function validate (object, schema, options) | ||
// #### {Object} object the object to validate. | ||
// #### {Object} schema (optional) the JSON Schema to validate against. | ||
// #### {Object} options (optional) options controlling the validation | ||
// process. | ||
// Validate <code>object</code> against a JSON Schema. | ||
// If <code>object</code> is self-describing (i.e. has a | ||
// <code>$schema</code> property), it will also be validated | ||
// against the referenced schema. [TODO]: This behaviour may be | ||
// suppressed by setting the {@link #validate.options.???} | ||
// option to <code>???</code>.[/TODO] | ||
// | ||
// If <code>schema</code> is not specified, and <code>object</code> | ||
// is not self-describing, validation always passes. | ||
// | ||
// <strong>Note:</strong> in order to pass options but no schema, | ||
// <code>schema</code> <em>must</em> be specified in the call to | ||
// <code>validate()</code>; otherwise, <code>options</code> will | ||
// be interpreted as the schema. <code>schema</code> may be passed | ||
// as <code>null</code>, <code>undefined</code>, or the empty object | ||
// (<code>{}</code>) in this case. | ||
// | ||
function validate(object, schema, options) { | ||
options = mixin({}, options); | ||
var errors = []; | ||
return obj; | ||
validateObject(object, schema, options, errors); | ||
// | ||
// TODO: self-described validation | ||
// if (! options.selfDescribing) { ... } | ||
// | ||
return { | ||
valid: !(errors.length), | ||
errors: errors | ||
}; | ||
} | ||
function validateObject(object, schema, options, errors) { | ||
var props, allProps = Object.keys(object), | ||
visitedProps = [], | ||
p = 0; | ||
var props; | ||
var visitedProps = []; | ||
var p = 0; | ||
@@ -283,6 +152,5 @@ // see 5.2 | ||
function validateProperty(object, value, property, schema, options, errors) { | ||
var format, | ||
valid, | ||
spec, | ||
type; | ||
var format; | ||
var valid; | ||
var type; | ||
@@ -292,5 +160,5 @@ function constrain(name, value, assert) { | ||
if(schema.conform) { | ||
error(name, property, value, {message: "Expected a valid "+property}, errors); | ||
return error(name, property, value, {message: "Expected a valid "+property}, errors); | ||
} else { | ||
error(name, property, value, schema, errors); | ||
return error(name, property, value, schema, errors); | ||
} | ||
@@ -333,2 +201,3 @@ } | ||
checkType(value, schema.type, function checkTypeCallback(err, type) { | ||
if (err) return error('type', property, typeof value, schema, errors); | ||
@@ -400,15 +269,257 @@ | ||
case 'object': | ||
// Recursive validation | ||
if (schema.properties || schema.patternProperties) { | ||
validateObject(value, schema, options, errors); | ||
return validateObject(value, schema, options, errors); | ||
} | ||
return; | ||
break; | ||
} | ||
}); | ||
}; | ||
function checkType(val, type, callback) { | ||
var result = false, | ||
types = isArray(type) ? type : [type]; | ||
function validateAction(action, schema, options) { | ||
options = mixin({}, options); | ||
var reservedVariables = ['user']; | ||
var queryTypes = ["find", "delete", "update", "insert", "save", "stream"]; | ||
var errors = []; | ||
var collection = schema.collections[action.collection]; | ||
// Validate queryType | ||
if (queryTypes.indexOf(action.queryType) == -1) { | ||
errors.push(new Error("Invalid query type " + action.queryType)); | ||
} | ||
// Property group (only validated when the action declares one) | ||
if (action.propertyGroup && !(collection.propertyGroups && collection.propertyGroups[action.propertyGroup] && collection.propertyGroups[action.propertyGroup].length)) { | ||
errors.push(new Error("Invalid propertyGroup " + action.propertyGroup + " on collection " + action.collection)); | ||
} | ||
// Validate keys for query | ||
var validPropertyKeys = Object.keys(collection.properties); | ||
var invalidQueryKeys = Object.keys(action.query).filter( | ||
function(value){ | ||
if (value == 'id') return false; | ||
return (validPropertyKeys.indexOf(value) == -1); | ||
} | ||
); | ||
if (invalidQueryKeys.length) { | ||
for (var key in invalidQueryKeys) { | ||
errors.push(new Error("Invalid query field " + invalidQueryKeys[key] + " on collection " + action.collection)); | ||
} | ||
} | ||
// Validate values for query | ||
if (action.requiredVariables) { | ||
var requiredVariables = Object.keys(action.requiredVariables); | ||
for (var key in action.query) { | ||
if (key == 'id') continue; | ||
if (reservedVariables.indexOf(key) != -1) { | ||
errors.push(new Error("Invalid query field " + key + " because it is reserved")); | ||
} | ||
var queryValue = action.query[key]; | ||
if (/\${([a-zA-Z0-9\/]+)}/.test(queryValue)) { | ||
var inferedVariable = queryValue.substring(2, queryValue.length - 1); | ||
if (requiredVariables.indexOf(inferedVariable) == -1) { | ||
errors.push(new Error("Invalid query field " + inferedVariable + " not in requiredVariables")); | ||
} | ||
if (action.requiredVariables[inferedVariable] != collection.properties[inferedVariable].type) { | ||
errors.push(new Error("Required field type mismatch for " + inferedVariable)); | ||
} | ||
} | ||
} | ||
} | ||
return { | ||
valid: !(errors.length), | ||
errors: errors | ||
}; | ||
} | ||
function validateBlueprint(schema) { | ||
var errors = []; | ||
if (!schema.collections) errors.push({ | ||
message: "Schema MUST contain collections" | ||
}); | ||
for (var key in schema.collections) { | ||
if (/[^a-zA-Z0-9]/.test(key)) errors.push({ | ||
message: "Schema collection names must be alphanumeric, " + key + " is not" | ||
}); | ||
if (key.length > 20 || key.length < 2) errors.push({ | ||
message: "Schema collection names must be between 2 and 20 characters, " + key + " is " + key.length | ||
}); | ||
var collection = schema.collections[key]; | ||
for (var prop in collection.properties) { | ||
var property = collection.properties[prop]; | ||
if (/[^a-zA-Z0-9]/.test(prop)) errors.push({ | ||
message: "Collection field names must be alphanumeric, " + prop + " is not" | ||
}); | ||
if (prop.length > 12 || prop.length < 2) errors.push({ | ||
message: "Collection field names must be between 2 and 12 characters, " + prop + " is " + prop.length | ||
}); | ||
if (!property.type) errors.push({ | ||
message: "Collection fields must have a type; " + prop + " does not" | ||
}); | ||
} | ||
// Validate collection propertyGroups | ||
for (var propGroup in collection.propertyGroups) { | ||
validatePropertyGroup(schema, key, propGroup, function (_errs) { | ||
for (var _err in _errs) { | ||
errors.push(_errs[_err]); | ||
} | ||
}); | ||
} | ||
} | ||
for (var key in schema.api) { | ||
if (/[^a-zA-Z0-9\/{}]/.test(key)) errors.push({ | ||
message: "Schema API endpoint names must be alphanumeric (except for variables) and start with a slash, " + key + " does not" | ||
}); | ||
if (Object.keys(schema.api[key]).length === 0) errors.push({ | ||
message: "Schema API endpoints must have at least one method; " + key + " does not" | ||
}); | ||
validateObject(schema.api[key], { | ||
type: 'object', | ||
patternProperties: { | ||
'^(get|post|put|delete)': { | ||
type: 'object', | ||
properties: { | ||
security: { | ||
enum: ["none", "api-key", "oauth"] | ||
}, | ||
customCode: { | ||
type: "boolean" | ||
}, | ||
collection: { | ||
type: "string", | ||
conform: function(v) { | ||
if (schema.collections[v]) { | ||
return true; | ||
} | ||
return false; | ||
} | ||
}, | ||
queryType: { | ||
enum: ["find", "insert", "delete", "save"] | ||
}, | ||
propertyGroup: { | ||
type: "string", | ||
conform: function(v, object) { | ||
if (object.collection && schema.collections[object.collection] && schema.collections[object.collection].propertyGroups && schema.collections[object.collection].propertyGroups[v]) { | ||
return true; | ||
} | ||
return false; | ||
} | ||
}, | ||
requiredVariables: { | ||
type: "object", | ||
patternProperties: { | ||
'[a-zA-Z0-9]': { | ||
enum: ["string", "array", "number", "boolean", "null"] | ||
} | ||
} | ||
}, | ||
query: { | ||
type: "mongo-query" | ||
} | ||
} | ||
} | ||
} | ||
}, {sudo: true}, errors); | ||
// Validate Actions | ||
for (var action in schema.api[key]) { | ||
if (schema.api[key][action].query) { | ||
var valid = validateAction(schema.api[key][action], schema); | ||
if (!valid.valid) { | ||
for (var _err in valid.errors) { | ||
errors.push(valid.errors[_err]); | ||
} | ||
} | ||
} | ||
} | ||
} | ||
return { | ||
valid: !(errors.length), | ||
errors: errors | ||
}; | ||
}; | ||
function validatePropertyGroup (schema, collectionKey, propertyGroupKey, callback) { | ||
var errors = []; | ||
var collection = schema.collections[collectionKey]; | ||
var propertyGroup = collection.propertyGroups[propertyGroupKey]; | ||
for (var index in propertyGroup) { | ||
var propOrLink = propertyGroup[index]; | ||
var refCollection = null; | ||
var refPropGroup = null; | ||
if (propOrLink.indexOf("{") != -1) { | ||
var origPropOrLink = propOrLink; | ||
propOrLink = origPropOrLink.substring(0, origPropOrLink.indexOf("{")); | ||
refCollection = collection.properties[propOrLink] && collection.properties[propOrLink].ref; | ||
refPropGroup = origPropOrLink.substring(origPropOrLink.indexOf("{") + 1, origPropOrLink.indexOf("}")); | ||
} | ||
// Check that property is set | ||
if (!collection.properties[propOrLink]) { | ||
errors.push(new Error('Invalid property ' + propOrLink + " not defined on the collection " + collectionKey)); | ||
} | ||
// Validate Property reference | ||
if (refCollection && !schema.collections[refCollection]) { | ||
errors.push(new Error('Invalid referenced collection of ' + refCollection + ' on the property ' + propOrLink + ' in the collection ' + collectionKey)); | ||
} | ||
if (refPropGroup && schema.collections[refCollection] && !(schema.collections[refCollection].propertyGroups || {})[refPropGroup]) { | ||
errors.push(new Error('Invalid referenced property group of ' + refPropGroup + ' on the collection ' + refCollection)); | ||
} | ||
} | ||
return callback(errors); | ||
} | ||
/** | ||
* Helper Functions | ||
*/ | ||
function mixin(obj) { | ||
var sources = Array.prototype.slice.call(arguments, 1); | ||
while (sources.length) { | ||
var source = sources.shift(); | ||
if (!source) { | ||
continue; | ||
} | ||
if (typeof(source) !== 'object') { | ||
throw new TypeError('mixin non-object'); | ||
} | ||
for (var p in source) { | ||
if (source.hasOwnProperty(p)) { | ||
obj[p] = source[p]; | ||
} | ||
} | ||
} | ||
return obj; | ||
} | ||
function checkType (val, type, callback) { | ||
var types = isArray(type) ? type : [type]; | ||
// No type - no check | ||
@@ -418,5 +529,7 @@ if (type === undefined) return callback(null, type); | ||
// Go through available types | ||
// And fine first matching | ||
// and find first primitive match | ||
for (var i = 0, l = types.length; i < l; i++) { | ||
type = types[i].toLowerCase().trim(); | ||
if (type === 'string' ? typeof val === 'string' : | ||
@@ -427,8 +540,18 @@ type === 'array' ? isArray(val) : | ||
type === 'null' ? val === null : | ||
type === 'boolean' ? typeof val === 'boolean' : false) { | ||
type === 'boolean' ? typeof val === 'boolean' : false) | ||
{ | ||
return callback(null, type); | ||
} | ||
// Check advanced types | ||
if (validate.types.hasOwnProperty(type)) { | ||
if (validate.types[type].check(val) ) { | ||
return callback(null, type); | ||
} | ||
} | ||
}; | ||
callback(true); | ||
// If nothing then fail all the things | ||
return callback(true); | ||
}; | ||
@@ -468,3 +591,2 @@ | ||
})(typeof(window) === 'undefined' ? module.exports : (window.json = window.json || {})); |
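For reference, a sketch of how the validator's entry points might be exercised directly; each returns { valid, errors }. The require path and blueprint values are assumptions.

var validator = require('./lib/validator'); // illustrative path

var blueprint = {
  collections: {
    User: {
      properties: {
        firstname: { type: 'string' },
        email: { type: 'string' }
      },
      propertyGroups: { test: ['firstname', 'email'] }
    }
  },
  api: {
    '/users': {
      get: { security: 'none', collection: 'User', queryType: 'find', propertyGroup: 'test' }
    }
  }
};

var result = validator.validateBlueprint(blueprint);
console.log(result.valid, result.errors);

// Single-object validation against one collection schema:
console.log(validator.validate({ firstname: 'Ada' }, blueprint.collections.User, null));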
@@ -145,2 +145,4 @@ /*jslint node: true, nomen: true, regexp: true, indent: 4*/ | ||
module.exports.normalizeQuery = normalizeQuery; | ||
module.exports({ field: { query: [{test: "dog"}, {$frog: "rest"}], $lt: "value2" } }); |
@@ -29,4 +29,2 @@ var assert = require('chai').assert, | ||
assert.strictEqual(res.errors.length, 0, res); | ||
} | ||
@@ -41,2 +39,3 @@ | ||
}; | ||
validator.mixin(schema.properties.field, argument); | ||
@@ -267,2 +266,5 @@ | ||
}, | ||
name: { | ||
type: "string" | ||
}, | ||
gitRepo: { | ||
@@ -305,4 +307,3 @@ type: "string" | ||
full: [ | ||
"name", | ||
"applicationId", | ||
"appId", | ||
"APIKeys{full}", | ||
@@ -309,0 +310,0 @@ "devKeys{full}", |
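The "APIKeys{full}" and "devKeys{full}" entries above use the property{group} reference syntax that the new validatePropertyGroup check parses: the part before the brace must be a property on this collection whose ref points at another collection, and the braced name must be a propertyGroup defined on that referenced collection. A hedged illustration, loosely modeled on the fixture above:

// Illustrative only: Application.propertyGroups.full pulls in APIKeys{full}
var schema = {
  collections: {
    APIKeys: {
      properties: { key: { type: 'string' } },
      propertyGroups: { full: ['key'] }
    },
    Application: {
      properties: {
        name: { type: 'string' },
        APIKeys: { type: 'array', ref: 'APIKeys' }
      },
      propertyGroups: {
        // "APIKeys{full}" resolves to APIKeys.propertyGroups.full
        full: ['name', 'APIKeys{full}']
      }
    }
  }
};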
{ | ||
"name": "borgdb", | ||
"version": "0.0.7", | ||
"version": "0.1.0", | ||
"description": "A DB agnostic NoSQL DB abstraction layer - NOT PRODUCTION READY", | ||
"main": "./lib/BorgDB.js", | ||
"scripts": { | ||
"test": "./node_modules/mocha/bin/mocha -u tdd -R spec --check-leaks --slow 15 --no-colors $(find tests -name 'test_*.js')" | ||
"test": "./node_modules/mocha/bin/mocha -u tdd -R spec --check-leaks --slow 15 $(find tests -name 'test_*.js')" | ||
}, | ||
@@ -15,11 +15,21 @@ "devDependencies": { | ||
}, | ||
"author": "Matt Apperson <matt@appersonlabs.com>", | ||
"author": { | ||
"name": "Matt Apperson", | ||
"email": "matt@appersonlabs.com" | ||
}, | ||
"license": "MIT", | ||
"dependencies": { | ||
"mongoskin": "~0.6.0", | ||
"mongodb": "*", | ||
"mongify": "appersonlabs/mongify", | ||
"objectdiff": "*", | ||
"async": "~0.2.9", | ||
"rethinkdb": "*", | ||
"node-v8-clone": "*", | ||
"mongo-join": "cbumgard/node-mongo-join", | ||
"mocha-mongo": "~0.1.1" | ||
} | ||
}, | ||
"readme": "ERROR: No README data found!", | ||
"_id": "borgdb@0.0.7", | ||
"_from": "borgdb@*" | ||
} |
@@ -10,2 +10,3 @@ var async = require('async'), | ||
var mongoURI = 'mongodb://localhost/test'; | ||
var MongoDB = require('mongoskin'); | ||
var mongo = require('mocha-mongo')(mongoURI); | ||
@@ -15,2 +16,7 @@ var drop = mongo.drop(); //only need to create this once | ||
suite('Basic Usage', function() { | ||
var conn = MongoDB.db(mongoURI, { | ||
safe: true | ||
}); | ||
describe('BorgDB', function() { | ||
@@ -25,8 +31,7 @@ it('should be able to access the BorgDB object', drop(function(notUsedDB, done) { | ||
it('should connect to the mongodb database without error', function(done) { | ||
BorgDB.init('mongodb', { | ||
url: mongoURI | ||
}, done); | ||
BorgDB.init('mongodb', conn, done); | ||
}); | ||
}); | ||
describe('new BorgDB.instance()', function() { | ||
@@ -42,2 +47,3 @@ it('should return a valid instance of the BorgDB engine', function(done) { | ||
}); | ||
describe('db.regester()', function() { | ||
@@ -132,7 +138,10 @@ it('should regester a schema and create a model', function(done) { | ||
it('should get data from the DB', function(done) { | ||
db.find({ | ||
db.find('User', { | ||
firstname: "Barack Obama" | ||
}, "User{test}", function(err, docs) { | ||
}, function(err, docs) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.isArray(docs); | ||
assert.isDefined(docs[0].id, 'id has been defined'); | ||
assert.isUndefined(docs[0]._id, '_id should not be defined'); | ||
done(); | ||
@@ -142,2 +151,3 @@ }); | ||
}); | ||
describe('db.delete()', function() { | ||
@@ -144,0 +154,0 @@ it('should delete data from the DB', function(done) { |
@@ -13,95 +13,86 @@ var async = require('async'), | ||
suite('Test Hook Usage', function() { | ||
describe('new BorgDB.instance()', function() { | ||
it('should return a valid instance of the BorgDB engine', drop(function(notUsedDB, done) { | ||
db = new BorgDB.instance(); | ||
BorgDB.init('mongodb', { url: mongoURI }, function () { | ||
assert.isObject(db); | ||
assert.strictEqual(Object.keys(db.schemas).length, 0); | ||
suite('Test Hook Usage', function() { | ||
done(); | ||
})); | ||
}); | ||
describe('db.hook()', function() { | ||
it('should continue without error', function(done) { | ||
db.hook('collection_name', function(name) { | ||
return 'test_'+ name; | ||
describe('db.hook()', function() { | ||
it('should continue without error', function(done) { | ||
db = new BorgDB.instance(); | ||
db.hook('collection_name', function(name) { | ||
return 'test_'+ name; | ||
}); | ||
assert.isFunction(db.hooks['collection_name']); | ||
done(); | ||
}); | ||
assert.isFunction(db.hooks['collection_name']); | ||
done(); | ||
}); | ||
}); | ||
describe('db.regester()', function() { | ||
it('should regester a schema and create a model', function(done) { | ||
db.regester("User", { | ||
properties: { | ||
firstname: { | ||
datatype: "string", | ||
required: true | ||
describe('BorgDB with Hook', function() { | ||
beforeEach( drop(function(notUsedDB, done) { | ||
db = new BorgDB.instance(); | ||
db.regester("User", { | ||
properties: { | ||
firstname: { | ||
datatype: "string", | ||
required: true | ||
}, | ||
lastname: { | ||
datatype: "string", | ||
required: false | ||
}, | ||
email: { | ||
datatype: "string", | ||
required: true | ||
}, | ||
password: { | ||
datatype: "password", | ||
required: true | ||
} | ||
}, | ||
lastname: { | ||
datatype: "string", | ||
required: false | ||
}, | ||
email: { | ||
datatype: "string", | ||
required: true | ||
}, | ||
password: { | ||
datatype: "password", | ||
required: true | ||
propertyGroups: { | ||
test: [ | ||
"firstname", | ||
"email" | ||
] | ||
} | ||
}, | ||
propertyGroups: { | ||
test: [ | ||
"firstname", | ||
"email" | ||
] | ||
} | ||
}); | ||
db.create('User', { | ||
"firstname": "Barack Obama", | ||
"lastname": "Barack lastname", | ||
"email": "Barack@Obama.com", | ||
"password": "Obama 123", | ||
}, function(err, doc) { | ||
done(); | ||
}); | ||
})); | ||
it('should find data from the DB', function(done) { | ||
db.find({ | ||
firstname: "Barack Obama" | ||
}, "User{test}", function(err, docs) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.isArray(docs); | ||
done(); | ||
}); | ||
}); | ||
assert.notEqual(Object.keys(db.schemas).length, 0); | ||
done(); | ||
}); | ||
}); | ||
describe('db.create()', function() { | ||
it('should save data to the DB', function(done) { | ||
db.create('User', { | ||
"firstname": "Barack Obama", | ||
"lastname": "Barack lastname", | ||
"email": "Barack@Obama.com", | ||
"password": "Obama 123", | ||
}, function(err, doc) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.isObject(doc); | ||
done(); | ||
it('should delete data from the DB', function(done) { | ||
db.delete("User", { | ||
firstname: "Barack Obama" | ||
}, function(err, success) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.strictEqual(success, true); | ||
done(); | ||
}); | ||
}); | ||
}); | ||
}); | ||
describe('db.find()', function() { | ||
it('should find data from the DB', function(done) { | ||
db.find({ | ||
firstname: "Barack Obama" | ||
}, "User{test}", function(err, docs) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.isArray(docs); | ||
done(); | ||
}); | ||
}); | ||
}); | ||
describe('db.delete()', function() { | ||
it('should delete data from the DB', function(done) { | ||
db.delete("User", { | ||
firstname: "Barack Obama" | ||
}, function(err, success) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.strictEqual(success, true); | ||
done(); | ||
}); | ||
}); | ||
}); | ||
}); | ||
// Create a new person |
@@ -6,4 +6,2 @@ var async = require('async'), | ||
var db; | ||
// Testing config | ||
@@ -15,13 +13,26 @@ var mongoURI = 'mongodb://localhost/test'; | ||
suite('Test Hook Usage', function() { | ||
describe('new BorgDB.instance()', function() { | ||
it('should return a valid instance of the BorgDB engine', drop(function(notUsedDB, done) { | ||
db = new BorgDB.instance(); | ||
assert.isObject(db); | ||
assert.strictEqual(Object.keys(db.schemas).length, 0); | ||
BorgDB.init('mongodb', { | ||
url: mongoURI | ||
}, function () { | ||
db = new BorgDB.instance(); | ||
done(); | ||
assert.isObject(db); | ||
assert.strictEqual(Object.keys(db.schemas).length, 0); | ||
done(); | ||
}); | ||
})); | ||
}); | ||
describe('db.regester()', function() { | ||
beforeEach(function(){ | ||
db = new BorgDB.instance(); | ||
}); | ||
it('should regester a schema and create a model', function(done) { | ||
@@ -70,6 +81,4 @@ db.regester("User", { | ||
}); | ||
}); | ||
describe('db.create()', function() { | ||
it('should save data to the DB', function(done) { | ||
it('should save data to the DB', function (done) { | ||
db.create('Sexes', { | ||
@@ -92,7 +101,88 @@ "name": "male" | ||
}); | ||
}); | ||
}); | ||
// it('should drop the database', function (done) { | ||
// drop(function (notUsedDB, done) { | ||
// db.find({ | ||
// firstname: "Barack Obama" | ||
// }, "User", function(err, docs) { | ||
// console.log(err, docs); | ||
// done(); | ||
// }); | ||
// }); | ||
// }); | ||
}); | ||
describe('db.find().join()', function() { | ||
describe('BorgDB', function() { | ||
beforeEach( drop(function(notUsedDB, done) { | ||
db = new BorgDB.instance(); | ||
db.regester("User", { | ||
properties: { | ||
firstname: { | ||
datatype: "string", | ||
required: true | ||
}, | ||
lastname: { | ||
datatype: "string", | ||
required: false | ||
}, | ||
email: { | ||
datatype: "string", | ||
required: true | ||
}, | ||
sex: { | ||
datatype: "string", | ||
ref: "Sexes", | ||
relationship: "one-to-many" | ||
}, | ||
password: { | ||
datatype: "password", | ||
required: true | ||
} | ||
}, | ||
propertyGroups: { | ||
test: [ | ||
"firstname", | ||
"email" | ||
] | ||
} | ||
}); | ||
db.regester("Sexes", { | ||
properties: { | ||
name: { | ||
datatype: "string", | ||
required: true | ||
} | ||
} | ||
}); | ||
db.create('Sexes', { | ||
"name": "male" | ||
}, function(err, doc) { | ||
if (err) { | ||
console.log(err); | ||
done(); | ||
} | ||
if (doc) { | ||
db.create('User', { | ||
"firstname": "Barack Obama", | ||
"lastname": "Barack lastname", | ||
"email": "Barack@Obama.com", | ||
"sex": doc.id, | ||
"password": "Obama 123", | ||
}, function(err, doc) { | ||
done(); | ||
}); | ||
} else { | ||
done(); | ||
} | ||
}); | ||
})); | ||
it('should find data from the DB', function(done) { | ||
@@ -102,6 +192,7 @@ db.find({ | ||
}, "User", function(err, docs) { | ||
docs[0].sex.getCollection(function(err, doc) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.isObject(doc); | ||
assert.equal(docs[0].sex.toString(), doc.id.toString()) | ||
done(); | ||
@@ -112,5 +203,56 @@ }); | ||
}); | ||
}); | ||
describe('db.delete()', function() { | ||
it('should find data from the DB, and save changes without saving the joined data to the master collection', function(done) { | ||
db.find({ | ||
firstname: "Barack Obama" | ||
}, "User", function(err, docs) { | ||
docs[0].sex.getCollection(function(err, doc) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.isObject(doc); | ||
}); | ||
docs[0].firstname = "Matt Apperson"; | ||
docs[0].save(function(err, success) { | ||
assert.isTrue(success); | ||
db.find({ | ||
firstname: "Matt Apperson" | ||
}, "User", function(err, doc) { | ||
assert.isObject(doc[0]); | ||
assert.isObject(doc[0].sex); | ||
assert.isString(doc[0].firstname); | ||
done(); | ||
}); | ||
}); | ||
}); | ||
}); | ||
it('should find data from the DB, and save changes in the joined data', function(done) { | ||
db.find({ | ||
firstname: "Barack Obama" | ||
}, "User", function(err, docs) { | ||
docs[0].sex.getCollection(function(err, doc) { | ||
assert.isNull(err, 'there was no error'); | ||
assert.isObject(doc); | ||
}); | ||
docs[0].sex.name = "frog"; | ||
docs[0].save(function(err, success) { | ||
assert.isTrue(success); | ||
db.find({ | ||
firstname: "Barack Obama" | ||
}, "User", function(err, docs) { | ||
assert.isObject(docs[0]); | ||
assert.isObject(docs[0].sex); | ||
assert.equal(docs[0].sex.name, "frog"); | ||
done(); | ||
}); | ||
}); | ||
}); | ||
}); | ||
it('should delete data from the DB', function(done) { | ||
db.delete("User", { | ||
@@ -123,4 +265,7 @@ firstname: "Barack Obama" | ||
}); | ||
}); | ||
}); | ||
}); // end describe | ||
}); |
GitHub dependency
Supply chain risk: Contains a dependency which resolves to a GitHub URL. Dependencies fetched from GitHub specifiers are not immutable and can be used to inject untrusted code or to reduce the likelihood of a reproducible install.
Found 1 instance in 1 package
Wildcard dependency
Quality: Package has a dependency with a floating version range. This can cause issues if the dependency publishes a new major version.
Found 3 instances in 1 package
+ Added mongify@appersonlabs/mongify
+ Added mongodb@*
+ Added node-v8-clone@*
+ Added rethinkdb@*
+ Added @mongodb-js/saslprep@1.1.9 (transitive)
+ Added @types/webidl-conversions@7.0.3 (transitive)
+ Added @types/whatwg-url@11.0.5 (transitive)
+ Added bindings@1.2.1 (transitive)
+ Added bluebird@2.11.0 (transitive)
+ Added bson@6.10.2 (transitive)
+ Added memory-pager@1.5.0 (transitive)
+ Added mongodb@6.13.0 (transitive)
+ Added mongodb-connection-string-url@3.0.2 (transitive)
+ Added nan@1.1.2 (transitive)
+ Added node-v8-clone@0.6.2 (transitive)
+ Added punycode@2.3.1 (transitive)
+ Added rethinkdb@2.4.2 (transitive)
+ Added sparse-bitfield@3.0.3 (transitive)
+ Added tr46@5.0.0 (transitive)
+ Added webidl-conversions@7.0.0 (transitive)
+ Added whatwg-url@14.1.0 (transitive)