New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

datastar

Package Overview
Dependencies
Maintainers
5
Versions
19
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

datastar - npm Package Compare versions

Comparing version 3.0.2 to 4.0.0-beta.1

.eslintrc

2

.nyc_output/processinfo/index.json

@@ -1,1 +0,1 @@

{"processes":{"0bedbb02-d497-41ad-b3df-988a4588f622":{"parent":"2f9667c8-5595-46ba-b9ce-e8035cd9c713","children":[]},"2f9667c8-5595-46ba-b9ce-e8035cd9c713":{"parent":"66b2dfb9-bbd0-43ae-8b06-b7dabd90d8f3","children":["0bedbb02-d497-41ad-b3df-988a4588f622"]},"66b2dfb9-bbd0-43ae-8b06-b7dabd90d8f3":{"parent":null,"children":["2f9667c8-5595-46ba-b9ce-e8035cd9c713"]}},"files":{"/Users/jpage/Code/datastar/lib/index.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/model.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/camel-case.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/memoize.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/schema.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/snake-case.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/index.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/statements/index.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/statements/create.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/statement.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/statements/find.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/statements/remove.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/statements/table.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/partial-statements/with.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/statements/alter.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-builder/statements/update.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/U
sers/jpage/Code/datastar/lib/statement-builder/compound-statement.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/statement-collection.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/attributes.js":["0bedbb02-d497-41ad-b3df-988a4588f622"],"/Users/jpage/Code/datastar/lib/await-wrap.js":["0bedbb02-d497-41ad-b3df-988a4588f622"]},"externalIds":{}}
{"processes":{"147bb809-59a0-45a5-ab5b-18e7057da5b1":{"parent":"d90be867-ead0-4d00-aff0-f10a8396e41a","children":["e060164e-eefe-4552-9713-fdb470e49706"]},"d90be867-ead0-4d00-aff0-f10a8396e41a":{"parent":null,"children":["147bb809-59a0-45a5-ab5b-18e7057da5b1"]},"e060164e-eefe-4552-9713-fdb470e49706":{"parent":"147bb809-59a0-45a5-ab5b-18e7057da5b1","children":[]}},"files":{"/Users/scommisso/Projects/datastar/lib/index.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/model.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/schema.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/index.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/statements/index.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/statements/create.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/statement.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/statements/find.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/statements/remove.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/statements/table.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/partial-statements/with.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/statements/alter.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/statements/update.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/statement-builder/compound-statement.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/li
b/statement-collection.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/attributes.js":["e060164e-eefe-4552-9713-fdb470e49706"],"/Users/scommisso/Projects/datastar/lib/await-wrap.js":["e060164e-eefe-4552-9713-fdb470e49706"]},"externalIds":{}}

@@ -1,2 +0,2 @@

var
const
snakeCase = require('./snake-case'),

@@ -6,4 +6,2 @@ camelCase = require('./camel-case'),

module.exports = Attributes;
//

@@ -13,91 +11,93 @@ // A class for managing state change and alias manipulation

//
//
// Legacy constructor form of Attributes: captures the owning Model,
// the instance being wrapped, and the Model's schema; normalizes `data`
// through the schema; and initializes the change-tracking bookkeeping.
//
function Attributes(instance, data) {
  const model = instance.Model;

  this.Model = model;
  this.instance = instance;
  this.schema = model.schema;
  this.data = model.schema.prepareForUse(data);

  // Change tracking: nothing is dirty until a setter records a change.
  this._isDirty = false;
  this._was = {};
  this._changed = {};
}
//
// An explicit getter method for the properties in `data`
// that manages aliases
//
Attributes.prototype.get = function (name) {
var key = this.schema.entityKeyToColumnName(name);
var value = this.data[key];
if (this.schema.requiresNullConversion(key)) {
return this.schema.valueToNull(value);
class Attributes {
//
// Wrap a model instance's raw `data`, tracking attribute changes and
// alias resolution. `instance.Model` must expose a `schema` whose
// `prepareForUse` normalizes the incoming data before it is stored.
//
constructor(instance, data) {
this.Model = instance.Model;
this.instance = instance;
this.schema = this.Model.schema;
// Normalized copy of the caller-supplied data (via schema.prepareForUse).
this.data = this.schema.prepareForUse(data);
this._isDirty = false; // flipped to true once any attribute is set()
this._was = {}; // original values, keyed by snake_case column name
this._changed = {}; // changed values, keyed by snake_case column name
}
if (this.schema.isKey(key)) { return value; }
return this.schema.nullToValue(this.schema.fieldMeta(key), value);
};
//
// An explicit setter method for the properties in `data`
// that manages aliases and records a change state as well
// as emitting a state change event if the Model has enabled it.
//
Attributes.prototype.set = function (name, value) {
if (this.schema._aliases[name]) name = this.schema._aliases[name];
var camelName = camelCase(name);
var snakeName = snakeCase(name);
this._isDirty = true;
var oldData = this.data[snakeName];
var self = this;
// only track the original value in case of multiple changes
this._was[snakeName] = this._was[snakeName] || oldData;
this._changed[snakeName] = this.data[snakeName] = value;
if (this.Model.options.notifyAttributeChanges) {
this.Model.emit('attribute:change', self.instance, camelName, value, oldData);
//
// An explicit getter method for the properties in `data`
// that manages aliases. `name` is mapped to its column name through the
// schema, then the stored value is converted between the schema's null
// placeholder representation and a real value as the schema dictates
// (see schema.valueToNull / schema.nullToValue -- semantics live in
// schema.js; verify there).
//
get(name) {
const key = this.schema.entityKeyToColumnName(name);
const value = this.data[key];
if (this.schema.requiresNullConversion(key)) {
return this.schema.valueToNull(value);
}
// Key columns are returned verbatim, with no null conversion.
if (this.schema.isKey(key)) { return value; }
return this.schema.nullToValue(this.schema.fieldMeta(key), value);
}
};
Attributes.prototype.was = function (name) {
return this._was[snakeCase(name)];
};
//
// An explicit setter method for the properties in `data`
// that manages aliases and records a change state as well
// as emitting a state change event if the Model has enabled it.
//
set(name, value) {
if (this.schema._aliases[name]) name = this.schema._aliases[name];
const camelName = camelCase(name);
const snakeName = snakeCase(name);
this._isDirty = true;
const oldData = this.data[snakeName];
//
// Return the previous value generated based on current and previous data
//
Attributes.prototype.previous = function () {
var self = this;
return Object.keys(this.data).reduce(function (prev, key) {
if (!(key in prev)) {
prev[key] = self.data[key];
// only track the original value in case of multiple changes
this._was[snakeName] = this._was[snakeName] || oldData;
this._changed[snakeName] = this.data[snakeName] = value;
if (this.Model.options.notifyAttributeChanges) {
this.Model.emit('attribute:change', this.instance, camelName, value, oldData);
}
}
return prev;
}, clone(this._was));
};
//
// Return the original value of `name` as recorded before the first set()
// changed it. Tracked values are keyed by snake_case column name, so any
// camelCase `name` is converted before lookup.
//
was(name) {
return this._was[snakeCase(name)];
}
Attributes.prototype.needsValidation = function () {
var names = this.schema.keys(), data = this.data;
//
// Return the previous value generated based on current and previous data
//
previous() {
return Object.keys(this.data).reduce((prev, key) => {
if (!(key in prev)) {
prev[key] = this.data[key];
}
if (this.schema.lookups) {
names = names.concat(Object.keys(this.schema.lookupTables));
return prev;
}, clone(this._was));
}
return names.reduce(function (memo, key) {
// don't include keys that are undefined
if (key in memo || key in data) {
memo[key] = key in memo ? memo[key] : data[key];
needsValidation() {
let names = this.schema.keys(), data = this.data;
if (this.schema.lookups) {
names = names.concat(Object.keys(this.schema.lookupTables));
}
return memo;
}, clone(this._changed));
};
//
// Public getter for state change management
//
Attributes.prototype.isDirty = function (name) {
return name ? snakeCase(name) in this._changed : this._isDirty;
};
return names.reduce(function (memo, key) {
// don't include keys that are undefined
if (key in memo || key in data) {
memo[key] = key in memo ? memo[key] : data[key];
}
return memo;
}, clone(this._changed));
}
Attributes.prototype.toJSON = function (snake) {
var data = this.schema.reNull(this.data);
return snake ? data : this.schema.toCamelCase(data);
};
//
// Public getter for state change management. With a `name`, reports
// whether that specific attribute has been changed via set(); with no
// argument, reports whether any attribute has changed.
//
isDirty(name) {
return name ? snakeCase(name) in this._changed : this._isDirty;
}
//
// Serialize the attribute data. When `snake` is truthy the snake_case
// column names are kept; otherwise keys are converted to camelCase.
// NOTE(review): reNull presumably converts schema null placeholders back
// to real nulls -- confirm against schema.js.
//
toJSON(snake) {
const data = this.schema.reNull(this.data);
return snake ? data : this.schema.toCamelCase(data);
}
}
module.exports = Attributes;

@@ -1,3 +0,3 @@

var memoize = require('./memoize').memoize1;
const memoize = require('./memoize').memoize1;
module.exports = memoize(require('to-camel-case'));
/* eslint no-proto:0 */
var Priam = require('priam'),
const
Priam = require('priam'),
Understudy = require('understudy'),
jtc = require('joi-of-cql'),
Model = require('./model'),
jtc = require('joi-of-cql'),
AwaitWrap = require('./await-wrap');
AwaitWrap = require('./await-wrap');
/**
* Constructor function for the Datastar object which is responsible
* for defining a set of models associated with a given connection
* defined in the `connect` function provided.
*
* @constructor
* @type {module.Datastar}
* @param {Object} - options
* @param {function} - connect
*/
class Datastar {
/**
* Constructor function for the Datastar object which is responsible
* for defining a set of models associated with a given connection
* defined in the `connect` function provided.
*
* @constructor
* @type {module.Datastar}
* @param {Object} options - Configuration options for the connection and models
* @param {function} connect - Function used to establish the underlying connection (falls back to the built-in connect)
*/
constructor(options, connect) {
this.options = options || {};
this.connect = connect || this.connect;
}
var Datastar = module.exports = function Datastar(options, connect) {
this.options = options || {};
this.connect = connect || this.connect;
};
/**
* Attach the connection to the model constructor
* @param {Object} objectModel - Defined object model
* @returns {Datastar} - The datastar object with the attached object model
*/
attach(objectModel) {
if (!this.connection) {
this.connect();
}
//
// Alias to joi for defining schema for a model
//
Datastar.prototype.schema = jtc;
objectModel.connection = this.connection;
//
// Expose StatementCollection on the datastar instance
//
Datastar.prototype.StatementCollection = require('./statement-collection');
return this;
}
/**
* Attach the connection to the model constructor
* @param {Object} objectModel - Defined object model
* @returns {Datastar} - The datastar object with the attached object model
*/
Datastar.prototype.attach = function attach(objectModel) {
if (!this.connection) {
this.connect();
/**
* Default connection logic which works with `priam`. This abstract
* exists for future extensibility.
* @param {Function} callback - The async callback
* @returns {Datastar} - The datastar object
*/
connect(callback) {
const config = this.options.config;
//
// Use the cached connection if a model has been defined already via `attach`
// so we don't create more than 1 priam instance. This allows `connect` to be
// called with the callback to ensure the connection is pre-heated for all
// models
//
this.connection = this.connection || new Priam(this.options);
let create;
if (config && config.keyspaceOptions) {
const replClause = JSON.stringify(config.keyspaceOptions).replace(/"/g, "'");
create = `CREATE KEYSPACE IF NOT EXISTS ${config.keyspace} WITH replication = ${replClause};`;
}
if (create) {
//
// Try to create the keyspace. As a side effect pre-heat the connection
//
this.connection.cql(create, [], { keyspace: 'system' }, callback);
} else if (callback) {
//
// If a callback is passed, we pre-heat the connection
//
this.connection.connect(callback);
}
return this;
}
objectModel.connection = this.connection;
/**
* Close the underlying connection
*
* @param {Function} callback - The async callback
* @returns {Datastar} - The datastar object
*/
close(callback) {
this.connection.close(callback);
return this;
}
return this;
};
/*
* Defines a new Model with the given `name` using the
* `definition` function provided.
*/
define(name, definition, options) {
/**
* Default connection logic which works with `priam`. This abstract
* exists for future extensibility.
* @param {Function} callback - The async callback
* @returns {Datastar} - The datastar object
*/
Datastar.prototype.connect = function connect(callback) {
var config = this.options.config;
//
// Use the cached connection if a model has been defined already via `attach`
// so we don't create more than 1 priam instance. This allows `connect` to be
// called with the callback to ensure the connection is pre-heated for all
// models
//
this.connection = this.connection || new Priam(this.options);
if (!definition && typeof name === 'function') {
options = definition;
definition = name;
name = definition.name;
} else if (!options && typeof definition === 'object') {
options = definition;
definition = function () {
};
}
var create = config && config.keyspaceOptions &&
'CREATE KEYSPACE IF NOT EXISTS ' + config.keyspace +
' WITH replication = ' +
JSON.stringify(config.keyspaceOptions).replace(/"/g, "'") +
';';
if (!name) {
throw new Error('A name for the model is required.');
}
if (!definition && !options) {
throw new Error('A definition function or options are required.');
}
if (create) {
//
// Try to create the keyspace. As a side effect pre-heat the connection
// Adapted from resourceful
// https://github.com/flatiron/resourceful/blob/master/lib/resourceful/core.js#L82-L219
//
this.connection.cql(create, [], { keyspace: 'system' }, callback);
} else if (callback) {
// A simple factory stub where we attach anything to the instance of the Model
// that we deem necessary
//
// If a callback is passed, we pre-heat the connection
const Factory = function Factory(data) {
this.Model = Factory;
this.init(data);
};
//
this.connection.connect(callback);
}
return this;
};
// Setup inheritance
// "Trust me, I'm a scientist"
// "Back off, man. I'm a scientist." - Bill Murray
//
Factory.__proto__ = Model;
Factory.prototype.__proto__ = Model.prototype;
/**
* Close the underlying connection
*
* @param {Function} callback - The async callback
* @returns {Datastar} - The datastar object
*/
Datastar.prototype.close = function (callback) {
this.connection.close(callback);
return this;
};
Understudy.call(Factory);
// NOTE: Call definition here. Beneficial if
// there's any non-function props being set.
definition.call(Factory);
/*
* Defines a new Model with the given `name` using the
* `definition` function provided.
*/
Datastar.prototype.define = function define(name, definition, options) {
//
// Attach the connection to the factory constructor
//
this.attach(Factory);
if (!definition && typeof name === 'function') {
options = definition;
definition = name;
name = definition.name;
} else if (!options && typeof definition === 'object') {
options = definition;
definition = function () {
};
}
options = options || this.options;
options.schema = options.schema || {};
options.name = options.name || name;
//
// Initialize the model and the various attributes that belong there
//
Factory.init(options);
if (!name) {
throw new Error('A name for the model is required.');
return Factory;
}
if (!definition && !options) {
throw new Error('A definition function or options are required.');
}
}
//
// Adapted from resourceful
// https://github.com/flatiron/resourceful/blob/master/lib/resourceful/core.js#L82-L219
//
// A simple factory stub where we attach anything to the instance of the Model
// that we deem necessary
//
var Factory = function Factory(data) {
this.Model = Factory;
this.init(data);
};
//
// Alias to joi for defining schema for a model
//
Datastar.prototype.schema = jtc;
//
// Setup inheritance
// "Trust me, I'm a scientist"
// "Back off, man. I'm a scientist." - Bill Murray
//
//
// Expose StatementCollection on the datastar instance
//
Datastar.prototype.StatementCollection = require('./statement-collection');
Factory.__proto__ = Model;
Factory.prototype.__proto__ = Model.prototype;
Understudy.call(Factory);
// NOTE: Call definition here. Beneficial if
// there's any non-function props being set.
definition.call(Factory);
//
// Attach the connection to the factory constructor
//
this.attach(Factory);
options = options || this.options;
options.schema = options.schema || {};
options.name = options.name || name;
//
// Initialize the model and the various attributes that belong there
//
Factory.init(options);
return Factory;
};
Datastar.Priam = Priam;

@@ -168,1 +168,3 @@ Datastar.Understudy = Understudy;

Datastar.AwaitWrap = AwaitWrap;
module.exports = Datastar;
module.exports.memoize1 = function (fn) {
var cache = new Map();
const cache = new Map();
return function (arg) {

@@ -7,3 +7,3 @@ if (cache.has(arg)) {

}
var result = fn(arg);
const result = fn(arg);
cache.set(arg, result);

@@ -10,0 +10,0 @@ return result;

@@ -1,16 +0,13 @@

var
EventEmitter = require('events').EventEmitter,
once = require('one-time'),
pick = require('lodash.pick'),
async = require('async'),
clone = require('clone'),
assign = require('object-assign'),
through = require('through2'),
readOnly = require('read-only-stream'),
ls = require('list-stream'),
camelCase = require('./camel-case'),
Schema = require('./schema'),
StatementBuilder = require('./statement-builder'),
StatementCollection = require('./statement-collection'),
Attributes = require('./attributes');
const
{ EventEmitter } = require('events'),
{ PassThrough, Readable } = require('stream'),
pick = require('lodash.pick'),
async = require('async'),
clone = require('clone'),
assign = require('object-assign'),
camelCase = require('./camel-case'),
Schema = require('./schema'),
StatementBuilder = require('./statement-builder'),
StatementCollection = require('./statement-collection'),
Attributes = require('./attributes');

@@ -20,3 +17,3 @@ //

//
var singleTypes = ['count', 'one', 'first'];
const singleTypes = ['count', 'one', 'first'];

@@ -31,8 +28,6 @@ /*

*/
var Model = module.exports = function Model() {
const Model = module.exports = function Model() {
};
Model.init = function init(options) {
var self = this;
this.options = options || {};

@@ -64,5 +59,5 @@

//
Object.keys(EventEmitter.prototype).forEach(function (k) {
self[k] = function () {
return self.emitter[k].apply(self.emitter, arguments);
Object.keys(EventEmitter.prototype).forEach(k => {
this[k] = (...args) => {
return this.emitter[k].apply(this.emitter, args);
};

@@ -87,5 +82,5 @@ });

//
self.before('update:build', function buildUpdate(updateOptions, callback) {
var entities = updateOptions.entities;
var previous = updateOptions.previous;
this.before('update:build', (updateOptions, callback) => {
const entities = updateOptions.entities;
const previous = updateOptions.previous;

@@ -96,3 +91,3 @@ //

//
if (!self.schema.lookups || entities.length === previous.length) {
if (!this.schema.lookups || entities.length === previous.length) {
return callback();

@@ -111,5 +106,5 @@ }

//
async.map(entities, function (entity, next) {
self.findOne({
conditions: self.schema.filterPrimaryConditions(clone(entity))
async.map(entities, (entity, next) => {
this.findOne({
conditions: this.schema.filterPrimaryConditions(clone(entity))
}, next);

@@ -124,8 +119,8 @@ }, function (err, previous) {

if (self.options.ensureTables) {
self.ensureTables(function (err) {
if (this.options.ensureTables) {
this.ensureTables(err => {
if (err) {
return self.emit('error', err);
return this.emit('error', err);
}
self.emit('ensure-tables:finish', self.schema);
this.emit('ensure-tables:finish', this.schema);
});

@@ -143,4 +138,2 @@ }

Model[action] = function (options, callback) {
var self = this;
options = this.validateArgs(options, callback);

@@ -151,3 +144,3 @@ if (!options) {

var statements = options.statements = options.statements
const statements = options.statements = options.statements
|| (new StatementCollection(this.connection, options.strategy)

@@ -159,45 +152,36 @@ .consistency(options.consistency || this.writeConsistency));

//
this.perform(action + ':build', options, function (next) {
//
// We should keep this naming generic so we can remove all this
// boilerplate in the future.
//
var entities = options.entities;
//
// Remark: Certain cases this is the previous entity that could be used
// for update. its required for lookup-table update, otherwise we fetch it
//
var previous = options.previous;
this.perform(`${action}:build`, options, next => {
try {
//
// We should keep this naming generic so we can remove all this
// boilerplate in the future.
//
const entities = options.entities;
//
// Remark: Certain cases this is the previous entity that could be used
// for update. its required for lookup-table update, otherwise we fetch it
//
const previous = options.previous;
var error;
for (var e = 0; e < entities.length; e++) {
// shallow clone
var opts = assign({}, options);
for (let e = 0; e < entities.length; e++) {
// shallow clone
const opts = assign({}, options);
opts.previous = previous && previous.length
? options.previous[e]
: null;
opts.previous = previous && previous.length
? options.previous[e]
: null;
var statement = self.builder[action](opts, entities[e]);
const statement = this.builder[action](opts, entities[e]);
statements.add(statement);
}
//
// TODO: Use something like `errs` to propagate back the error with the
// actual entity object that was the culprit
// If there was an error building the statement, return early
//
if (statement instanceof Error) {
error = statement;
break;
}
statements.add(statement);
return void next();
} catch (err) {
setImmediate(next, err);
}
//
// If there was an error building the statement, return early
//
return error
? void setImmediate(next, error)
: void next();
}, function (err) {
}, err => {
if (err) {

@@ -212,3 +196,3 @@ return void callback(err);

//
self.perform(action + ':execute', options, function (next) {
this.perform(`${action}:execute`, options, function (next) {
statements.execute(next);

@@ -226,44 +210,22 @@ }, callback);

Model.find = function find(options, callback) {
options = options || {};
var self = this;
options = self.validateFind(options, callback);
if (!options) return;
//
// Handle the case where we return a stream error
//
if (options instanceof Error) {
return errorStream(options);
try {
options = this.validateFind(options || {}, callback);
} catch (err) {
return callback ? void callback(err) : errorStream(err);
}
options.type = options.type || 'all';
var action = options.type !== 'all'
? 'find:' + options.type
: 'find';
const iterable = this.iterate(options);
//
// If we are streaming, we need a proxy stream
//
var proxy;
if (options.iterable) {
return iterable;
}
//
// Make a proxy stream for returning a stream
//
if (options.stream) {
proxy = through.obj();
return Readable.from(iterable);
}
function done(err, result) {
if (err) {
return proxy
? proxy.emit('error', err)
: callback(err);
}
const action = options.type !== 'all'
? `find:${options.type}`
: 'find';
if (!proxy) {
return void callback(err, result);
}
}
//

@@ -273,56 +235,41 @@ // This allows for cascading of after functions to mutate the result of this

//
self.waterfall(action, options, function (next) {
var statement = self.builder.find(options);
//
// Ensure next can only be called once
// If we are not a stream, figure out if we want to return an object or an
// array, and return the appropriate thing by unwrapping if necessary
//
var fn = once(function (err, result) {
if (singleTypes.indexOf(options.type) !== -1) result = result && result[0];
next(err, result);
});
//
// TODO: Use something like `errs` to propagate back the error with the
// actual entity object that was the culprit
//
if (statement instanceof Error) {
return void setImmediate(next, statement);
this.waterfall(action, options, async next => {
const singleResult = singleTypes.includes(options.type);
const result = [];
try {
for await (const row of iterable) {
if (singleResult) {
return void next(null, row);
}
result.push(row);
}
return void next(null, singleResult ? result[0] : result);
} catch (err) {
return void next(err);
}
}, callback);
};
var stream = statement
//
// ExtendQuery returns the priam connection query so we have access to
// those functions
//
.extendQuery(self.connection.beginQuery())
//
// Allow configurable consistency
//
.consistency(options.consistency || self.readConsistency)
.stream();
Model.iterate = async function *(options) {
options.type = options.type || 'all';
const iterable = this.builder.find(options)
//
// Pipe the stream to the proxy stream or the list-stream that will collect
// the data for us and return to the caller
// ExtendQuery returns the priam connection query so we have access to
// those functions
//
stream
.on('error', fn)
//
// Simple Stream to re-transform back to camelCase keys
//
.pipe(through.obj(function (data, enc, callback) {
return void callback(null, self.toInstance(data));
}))
.pipe(proxy ? proxy : ls.obj(fn));
.extendQuery(this.connection.beginQuery())
//
// Allow configurable consistency
//
.consistency(options.consistency || this.readConsistency)
.iterate();
}, done);
//
// We return the stream or undefined
//
return proxy
? readOnly(proxy)
: null;
for await (const row of iterable) {
const instance = this.toInstance(row);
yield this.transform ? this.transform(instance) : instance;
}
};

@@ -333,12 +280,10 @@

*/
['ensure', 'drop'].forEach(function (type) {
var action = [type, 'tables'].join('-');
var name = camelCase(action);
const action = [type, 'tables'].join('-');
const name = camelCase(action);
Model[name] = function (options, callback) {
var self = this;
if (!callback) {
callback = options || function () {};
options = self.options;
options = this.options;
}

@@ -349,5 +294,5 @@

//
options = self.assessOpts(
options = this.assessOpts(
assign(
pick(self.options, ['alter', 'orderBy', 'with']), options
pick(this.options, ['alter', 'orderBy', 'with']), options
)

@@ -362,34 +307,26 @@ );

var statements = options.statements = options.statements
|| (new StatementCollection(self.connection, options.strategy));
const statements = options.statements = options.statements
|| (new StatementCollection(this.connection, options.strategy));
self.perform([action, 'build'].join(':'), options, function (next) {
var statement = self.builder.table(options);
if (statement instanceof Error) {
return void setImmediate(next, statement);
}
this.perform([action, 'build'].join(':'), options, next => {
try {
const statement = this.builder.table(options);
statements.add(statement);
statements.add(statement);
if (!this.schema.lookups) {
return void setImmediate(next);
}
if (!self.schema.lookups) {
return void setImmediate(next);
}
var error;
Object.keys(self.schema.lookupTables).every(function (primaryKey) {
Object.keys(this.schema.lookupTables).forEach(primaryKey => {
// shallow clone
var tableOpts = assign({}, options, { lookupKey: primaryKey });
const tableOpts = assign({}, options, { lookupKey: primaryKey });
const lookupStatement = this.builder.table(tableOpts);
statements.add(lookupStatement);
});
var lookupStatement = self.builder.table(tableOpts);
if (lookupStatement instanceof Error) {
error = lookupStatement;
return false;
}
statements.add(lookupStatement);
return true;
}, self);
return error ? setImmediate(next, error) : next();
}, function (err) {
return void next();
} catch (err) {
setImmediate(next, err);
}
}, err => {
if (err) {

@@ -401,3 +338,3 @@ return void callback(err);

self.perform([action, 'execute'].join(':'), options, function (next) {
this.perform([action, 'execute'].join(':'), options, function (next) {
statements.execute(next);

@@ -429,3 +366,3 @@ }, callback);

*/
var findTypes = {
const findTypes = {
findFirst: 'first',

@@ -437,3 +374,3 @@ findOne: 'one',

var findTypesLookup = Object.keys(findTypes).reduce(function (acc, key) {
const findTypesLookup = Object.keys(findTypes).reduce(function (acc, key) {
acc[findTypes[key]] = true;

@@ -445,5 +382,4 @@ return acc;

Model[method] = function (options, callback) {
var type;
options = options || {};
type = findTypes[method];
const type = findTypes[method];
//

@@ -473,5 +409,5 @@ // Remark: If we are a type of function that returns a single value from cassandra

//
// Build a readable object-mode stream whose only job is to emit `error`
// asynchronously. Used by find() to surface validation errors to callers
// who requested a stream instead of a callback.
//
// (The span previously contained both halves of a version diff -- `stream`
// was declared twice and returned twice; this keeps the PassThrough form.)
//
function errorStream(error) {
  const stream = new PassThrough({ objectMode: true });
  // Defer the emit one tick so the caller has a chance to attach an
  // 'error' listener before it fires.
  setImmediate(stream.emit.bind(stream), 'error', error);
  return stream;
}

@@ -483,3 +419,3 @@

Model.assessOpts = function assessOpts(options) {
var opts = assign({}, options);
const opts = assign({}, options);

@@ -510,3 +446,3 @@ //

Model.normalizeFindOpts = function (options, callback) {
var opts = {};
const opts = {};

@@ -519,6 +455,8 @@ //

if (this.schema.type(options) === 'string') {
opts.conditions = this.schema.generateConditions(options);
return this.schema.type(opts.conditions) === 'error'
? void callback(opts.conditions)
: opts;
try {
opts.conditions = this.schema.generateConditions(options);
return opts;
} catch (err) {
return void callback(err);
}
}

@@ -547,17 +485,12 @@

Model.validateFind = function validateFind(options, callback) {
var stream = !callback || typeof callback !== 'function';
const stream = !callback || typeof callback !== 'function';
const opts = assign({}, options);
function error(err) {
if (!stream) return void setImmediate(callback, err);
return err;
opts.type = options.type || 'all';
if (!findTypesLookup[opts.type]) {
throw new Error(`Improper find type. Must be ${Object.keys(findTypesLookup)}`);
}
var opts = assign({}, options);
if (opts.type && !findTypesLookup[opts.type]) {
return error(new Error('Improper find type. Must be ' + Object.keys(findTypesLookup)));
}
if (!opts.conditions) {
return error(new Error('Conditions must be passed to execute a find query'));
throw new Error('Conditions must be passed to execute a find query');
}

@@ -593,3 +526,3 @@ //

//
var opts = options.isDatastar ? { entities: [options.toJSON(true)] } : assign({}, options);
let opts = options.isDatastar ? { entities: [options.toJSON(true)] } : assign({}, options);

@@ -602,3 +535,3 @@ //

if (!opts.entity && !opts.entities) {
var entity = opts;
const entity = opts;
opts = { entities: !Array.isArray(entity) ? [entity] : entity };

@@ -656,3 +589,8 @@ }

Model.prototype.isValid = function isValid(type) {
return this.validate(type) instanceof Error;
try {
this.validate(type);
return true;
} catch (err) {
return false;
}
};

@@ -680,12 +618,11 @@

var result = this.validate(this.id ? 'update' : 'create');
if (result instanceof Error) {
return void fn(result);
try {
const result = this.validate(this.id ? 'update' : 'create');
return this.Model.update({
entity: result,
previous: this.attributes.previous()
}, fn);
} catch (err) {
return void fn(err);
}
return this.Model.update({
entity: result,
previous: this.attributes.previous()
}, fn);
};

@@ -692,0 +629,0 @@

@@ -1,1056 +0,1050 @@

var
util = require('util'),
const
uuid = require('uuid'),
clone = require('clone'),
snakeCase = require('./snake-case'),
camelCase = require('./camel-case'),
assign = require('object-assign'),
priam = require('priam'),
joi = require('joi-of-cql');
joi = require('joi-of-cql'),
snakeCase = require('./snake-case'),
camelCase = require('./camel-case');
var dataTypes = priam.dataTypes,
const
dataTypes = priam.dataTypes,
TimeUuid = priam.valueTypes.TimeUuid;
var invalidChar = /\W/;
const invalidChar = /\W/;
module.exports = Schema;
class Schema {
/**
* In this method we are going to create a denormalized structure from the
* schema representation that we type out as JSON. This structure should be easy
* to ask questions to and lookup various properties, etc.
*
* @param {string} name- Schema name
* @param {Object} schema - Schema object
* @param {String} schema.name - name of the table
* @param {Array} schema.keys - Primary and secondary keys of a schema
* @param {Object} schema.columns - An object of column names to their type
* @param {Object} schema.maps - An object of the column names that are a special map type with their type as value
* @param {Object} schema.sets - An object of the column names that are a special set type with their type as value
* @constructor
*/
/**
* In this method we are going to create a denormalized structure from the
* schema representation that we type out as JSON. This structure should be easy
* to ask questions to and lookup various properties, etc.
*
* @param {string} name - Schema name
* @param {Object} schema - Schema object
* @param {String} schema.name - name of the table
* @param {Array} schema.keys - Primary and secondary keys of a schema
* @param {Object} schema.columns - An object of column names to their type
* @param {Object} schema.maps - An object of the column names that are a special map type with their type as value
* @param {Object} schema.sets - An object of the column names that are a special set type with their type as value
* @constructor
*/
constructor(name, schema) {
if (!this) return new Schema(name, schema);
function Schema(name, schema) {
if (!this) return new Schema(name, schema);
if (!name || invalidChar.test(name))
throw new Error(`Invalid character in schema name ${name}, use snake_case`);
if (!name || invalidChar.test(name))
throw new Error('Invalid character in schema name ' + name + ', use snake_case');
//
// Remark: We lowercase this by default for easier consistency
//
this.name = name.toLowerCase();
//
// Special operators used for timuuids along with associated functions
// when building queries
//
this.operators = {
gt: '>',
gte: '>=',
lt: '<',
lte: '<='
};
//
// Remark: We lowercase this by default for easier consistency
//
this.name = name.toLowerCase();
//
// Special operators used for timuuids along with associated functions
// when building queries
//
this.operators = {
gt: '>',
gte: '>=',
lt: '<',
lte: '<='
};
//
// A mapping for possible keys that can be passed in for defining order
//
this.orderMap = {
ascending: 'ASC',
asc: 'ASC',
desc: 'DESC',
descending: 'DESC'
};
//
// A mapping for possible keys that can be passed in for defining order
//
this.orderMap = {
ascending: 'ASC',
asc: 'ASC',
desc: 'DESC',
descending: 'DESC'
};
this.cqlFunctions = {
timeuuid: {
gt: time('min'),
gte: time('min'),
lt: time('max'),
lte: time('max')
}
};
this.cqlFunctions = {
timeuuid: {
gt: time('min'),
gte: time('min'),
lt: time('max'),
lte: time('max')
//
// Keys used when defaulting values to generate a number
//
this.generateKeysLookup = ['uuid_v4', 'uuid_empty', 'date_now']
.reduce(function (acc, type) {
acc[type] = defaultValue(type);
return acc;
}, {});
//
// Store a reference to the original joi schema thats passed in
//
this.joi = schema;
//
// We default to having different validators based on the `type`
//
this.validator = {
create: schema,
update: schema
};
this._columns = this.meta = schema.toCql();
this._aliases = schema.aliases() || {};
let keys = schema.clusteringKey();
if (!Array.isArray(keys)) {
keys = [keys];
}
};
//
// Keys used when defaulting values to generate a number
//
this.generateKeysLookup = ['uuid_v4', 'uuid_empty', 'date_now']
.reduce(function (acc, type) {
acc[type] = defaultValue(type);
return acc;
}, {}
);
const pKey = schema.partitionKey();
//
// If there is no partitionKey, throw an error because the schema is not valid
//
if (!pKey || !pKey.length) throw new Error('You must define a partitionKey on your schema');
keys.unshift(pKey);
this._originalKeys = keys;
//
// Set the primary and secondary keys
//
this._primaryKeys = this._originalKeys[0];
this._secondaryKeys = this._originalKeys.slice(1);
//
// The flattened array of all the necessary keys that are required
//
this._keys = this.primaryKeys().concat(this._secondaryKeys);
//
// Indication that we have a compound primary/partition key
//
this.compositePrimary = this.primaryKeys().length >= 2;
//
// Primary or secondary key lookup table
//
this._keysLookup = createLookup(this._keys);
//
// Lookup for primaryKeys
//
this._primaryKeysLookup = createLookup(this.primaryKeys());
//
// Secondary Keys lookup.
// Remark: Not sure if there can be multiple of these but seems possible?
//
this._secondaryKeysLookup = createLookup(this._secondaryKeys);
// Set our list of keys as "columnKeys"
//
this._columnKeys = Object.keys(this.meta);
//
// We realize that we store aliases in a way that is backwards when
// considering it as a lookup table to the type of key actually listed in the
// schema, so lets reverse it to be a proper lookup table for the case that we
// use. Keep around the original as well.
//
// Example: { id: artist_id } is the standard schema.aliases
// We use it for proper lookups as { artist_id: id } in mappedFields
//
// This means that any object passed in with a key `id` will be converted to
// the real value `artist_id` when being inserted. This CAN also be
// considered when retransforming on the way out.
//
//
this._aliasesReverse = Object.keys(this._aliases)
.reduce(function (acc, key) {
const newKey = this._aliases[key];
acc[newKey] = key;
return acc;
}.bind(this), {});
//
// If lookupKeys exist in the schema, setup the proper properties to handle
// those cases
//
const badLookups = this.setLookupKeys(schema.lookupKeys());
if (badLookups) {
throw badLookups;
}
}
//
// Store a reference to the original joi schema thats passed in
// Require some set of keys to generate another joi schema
//
this.joi = schema;
//
// We default to having different validators based on the `type`
//
this.validator = {
create: schema,
update: schema
};
this._columns = this.meta = schema.toCql();
this._aliases = schema.aliases() || {};
var keys = schema.clusteringKey();
if (!Array.isArray(keys)) {
keys = [keys];
requireKeys(keys) {
return keys.reduce((memo, key) => {
if (!this.meta[key] || !this.meta[key].default) {
memo[key] = joi.any().required();
}
return memo;
}, {});
}
var pKey = schema.partitionKey();
//
// If there is no partitionKey, throw an error because the schema is not valid
// Create a separate Lookup table JUST for Lookup tables. Yes confusing I know,
// Object lookup for actual cassandra lookup tables. This should default to
// lookupKeys/lookupTables if it is an object
//
if (!pKey || !pKey.length) throw new Error('You must define a partitionKey on your schema');
setLookupKeys(lookupKeys) {
//
// Return an error to be thrown if we are a compositePrimary key and we are
// given lookupKeys as that is something we do not support
//
if (this.compositePrimary && lookupKeys &&
(this.type(lookupKeys) === 'array' && lookupKeys.length !== 0)
|| (this.type(lookupKeys) === 'object' && Object.keys(lookupKeys).length !== 0)
)
throw new Error('You cannot create a lookup table with a compound key');
keys.unshift(pKey);
this._originalKeys = keys;
lookupKeys = this.fixKeys(lookupKeys) || {};
this.lookupTables = this.type(lookupKeys) === 'object'
? lookupKeys
: lookupKeys.reduce((acc, key) => {
acc[key] = `${this.name}_by_${key}`;
return acc;
}, {});
//
// Set the primary and secondary keys
//
this._primaryKeys = this._originalKeys[0];
this._secondaryKeys = this._originalKeys.slice(1);
lookupKeys = Object.keys(this.lookupTables);
//
// The flattened array of all the necessary keys that are required
//
this._keys = this.primaryKeys().concat(this._secondaryKeys);
//
// Indication that we have a compound primary/partition key
//
this.compositePrimary = this.primaryKeys().length >= 2;
//
// If there are any lookup keys that do not exist on this
// Schema then return an error accordingly
//
const missingLookupKeys = lookupKeys.filter(key => {
return !this.exists(key);
});
//
// Primary or secondary key lookup table
//
this._keysLookup = createLookup(this._keys);
if (missingLookupKeys.length) {
throw new Error(`Invalid lookup keys: ${missingLookupKeys.join(', ')}`);
}
//
// Reverse lookup of key -> tableName to tableName -> key. e.g.
//
// {
// "model_by_prop1": "prop1",
// "model_by_prop2": "prop2"
// }
//
this._reverseLookupKeyMap = lookupKeys
.reduce((acc, key) => {
const table = this.lookupTables[key];
acc[table] = key;
return acc;
}, {});
//
// Set a property on the schema that tells us if we have lookup tables we need
// to write to.
//
this.lookups = !!lookupKeys.length;
//
// Setup the requiredKeys lookup. When we are dealing with lookup tables we
// need to require all the primarykeys associated
//
this._requiredKeysLookup = createLookup(lookupKeys.concat(this.keys()));
this._requiredKeys = Object.keys(this._requiredKeysLookup);
//
// Attach any extra restrictions for the create schema
//
if (this._requiredKeys.length) {
this.validator.create = this.validator.create.concat(
joi.object(
this.requireKeys(this._requiredKeys)
)
);
}
}
//
// Lookup for primaryKeys
// Validate and default things
//
this._primaryKeysLookup = createLookup(this.primaryKeys());
validate(entity, type) {
type = type || 'update';
const { error, value } = joi.validate(entity, this.validator[type], { context: { operation: type } });
if (error) {
throw error;
} else {
return value;
}
}
//
// Secondary Keys lookup.
// Remark: Not sure if there can be multiple of these but seems possible?
// Test if the key exists and returns the transformed key to use if it does,
// otherwise returns undefined. This requires us
// to transform the key to snake_case as well as remap any aliases so we can
// specify the key as a standard camelCase key when passing in any options.
//
this._secondaryKeysLookup = createLookup(this._secondaryKeys);
exists(key) {
const transformed = this.fixKeys(key);
// Set our list of keys as "columnKeys"
//
this._columnKeys = Object.keys(this.meta);
return !this.meta[transformed] ? null : transformed;
}
//
// We realize that we store aliases in a way that is backwards when
// considering it as a lookup table to the type of key actually listed in the
// schema, so lets reverse it to be a proper lookup table for the case that we
// use. Keep around the original as well.
// Transform an entity key to the proper key that cassandra expects (snake_case, unalias)
//
// Example: { id: artist_id } is the standard schema.aliases
// We use it for proper lookups as { artist_id: id } in mappedFields
//
// This means that any object passed in with a key `id` will be converted to
// the real value `artist_id` when being inserted. This CAN also be
// considered when retransforming on the way out.
//
//
this._aliasesReverse = Object.keys(this._aliases)
.reduce(function (acc, key) {
var newKey = this._aliases[key];
acc[newKey] = key;
return acc;
}.bind(this), {});
entityKeyToColumnName(key) {
const mappedKey = snakeCase(key);
const alias = this._aliases[mappedKey];
return alias || mappedKey;
}
//
// If lookupKeys exist in the schema, setup the proper properties to handle
// those cases
// Transform an entity, an object of conditions or an array of fields to have the proper
// keys that cassandra expects (snake_case, unalias)
//
var badLookups = this.setLookupKeys(schema.lookupKeys());
fixKeys(entity) {
entity = entity || {};
if (badLookups) {
throw badLookups;
}
}
if (entity.isDatastar) {
entity = entity.attributes.data;
}
//
// Require some set of keys to generate another joi schema
//
Schema.prototype.requireKeys = function (keys) {
var self = this;
return keys.reduce(function (memo, key) {
if (!self.meta[key] || !self.meta[key].default) {
memo[key] = joi.any().required();
if (this.type(entity) === 'object') {
return Object.keys(entity).reduce((acc, key) => {
//
// If we have an alias, check it and convert it to what we expect in C*
//
const mappedKey = this.entityKeyToColumnName(key);
acc[mappedKey] = entity[key];
return acc;
}, {});
}
return memo;
}, {});
};
//
// If we have an array, this is an array of fields for doing "selects"
//
if (Array.isArray(entity)) {
return entity.map(this.fixKeys, this);
}
//
// Create a separate Lookup table JUST for Lookup tables. Yes confusing I know,
// Object lookup for actual cassandra lookup tables. This should default to
// lookupKeys/lookupTables if it is an object
//
Schema.prototype.setLookupKeys = function (lookupKeys) {
var self = this;
//
// IDK why this would happen but this is an easy case
//
if (this.type(entity) === 'string') {
const mapped = snakeCase(entity);
return this._aliases[mapped]
? this._aliases[mapped]
: mapped || entity;
}
//
// If we meet 0 conditions we just return what we got, this maybe should be an
// error? Idk, this is just a weird thing in general
//
return entity;
}
//
// Return an error to be thrown if we are a compositePrimary key and we are
// given lookupKeys as that is something we do not support
// Transform in the opposite direction of transform by remapping snakeCase back
// to camelCase
//
if (this.compositePrimary && lookupKeys &&
(this.type(lookupKeys) === 'array' && lookupKeys.length !== 0)
|| (this.type(lookupKeys) === 'object' && Object.keys(lookupKeys).length !== 0))
return new Error('You cannot create a lookup table with a compound key');
toCamelCase(entity) {
entity = entity || {};
lookupKeys = this.fixKeys(lookupKeys) || {};
this.lookupTables = this.type(lookupKeys) === 'object'
? lookupKeys
: lookupKeys.reduce(function (acc, key) {
acc[key] = self.name + '_by_' + key;
return acc;
}, {});
if (this.type(entity) === 'object') {
return Object.keys(entity).reduce((acc, key) => {
//
// If we have an alias, check it and convert it to what we
const mappedKey = camelCase(this._aliasesReverse[key] || key);
lookupKeys = Object.keys(this.lookupTables);
acc[mappedKey] = entity[key];
//
// If there are any lookup keys that do not exist on this
// Schema then return an error accordingly
//
var missingLookupKeys = lookupKeys.filter(function (key) {
return !this.exists(key);
}, this);
return acc;
}, {});
}
if (missingLookupKeys.length) {
return new Error('Invalid lookup keys: ' + missingLookupKeys.join(', '));
//
// If we have an array, this is an array of fields for doing "selects"
//
if (Array.isArray(entity)) {
return entity.map(function (field) {
return camelCase(this._aliasesReverse[field] || field);
}, this);
}
//
// IDK why this would happen but this is an easy case
//
if (this.type(entity) === 'string') {
return camelCase(this._aliasesReverse[entity] || entity);
}
//
// If we meet 0 conditions we just return what we got, this maybe should be an
// error? Idk, this is just a weird thing in general
//
return entity;
}
//
// Reverse lookup of key -> tableName to tableName -> key. e.g.
// Generate a conditions object given a value assumed to be the primary key
//
// {
// "model_by_prop1": "prop1",
// "model_by_prop2": "prop2"
// }
//
this._reverseLookupKeyMap = lookupKeys
.reduce(function (acc, key) {
var table = self.lookupTables[key];
acc[table] = key;
generateConditions(value) {
const primaries = this.primaryKeys();
if (primaries.length > 1) {
throw new Error(`More conditions required ${primaries.join(', ')}`);
}
//
// Return an object with the single primaryKey with the correct case assigned
// to the value passed in. Allows us to support passing a string for findOne
//
return primaries.reduce((acc, key) => {
acc[this.toCamelCase(key)] = value;
return acc;
}, {});
}
//
// Set a property on the schema that tells us if we have lookup tables we need
// to write to.
// Return both primary and secondary keys
//
this.lookups = !!lookupKeys.length;
keys() {
return this._keys;
}
//
// Setup the requiredKeys lookup. When we are dealing with lookup tables we
// need to require all the primarykeys associated
// Returns whether or not it is a primary or secondary key
//
this._requiredKeysLookup = createLookup(lookupKeys.concat(this.keys()));
this._requiredKeys = Object.keys(this._requiredKeysLookup);
isKey(key) {
return !!this._keysLookup[key];
}
//
// Attach any extra restrictions for the create schema
// Return the column type for the given
//
if (this._requiredKeys.length) {
this.validator.create = this.validator.create.concat(
joi.object(
this.requireKeys(this._requiredKeys)
)
);
fieldMeta(field) {
return this.meta[field];
}
};
//
// Validate and default things
//
Schema.prototype.validate = function (entity, type) {
type = type || 'update';
var result = joi.validate(entity, this.validator[type], { context: { operation: type }});
return result.error || result.value;
};
prepareForUse(data) {
return this.convert(this.fixKeys(data), 'deserialize');
}
//
// Test if the key exists and returns the transformed key to use if it does,
// otherwise returns undefined. This requires us
// to transform the key to snake_case as well as remap any aliases so we can
// specify the key as a standard camelCase key when passing in any options.
//
Schema.prototype.exists = function (key) {
var transformed = this.fixKeys(key);
return !this.meta[transformed] ? null : transformed;
};
//
// Transform an entity key to the proper key that cassandra expects (snake_case, unalias)
//
Schema.prototype.entityKeyToColumnName = function (key) {
var mappedKey = snakeCase(key);
var alias = this._aliases[mappedKey];
return alias || mappedKey;
};
//
// Transform an entity, an object of conditions or an array of fields to have the proper
// keys that cassandra expects (snake_case, unalias)
//
Schema.prototype.fixKeys = function (entity) {
entity = entity || {};
if (entity.isDatastar) {
entity = entity.attributes.data;
// unknown use case
prepareForSerialization(data) {
return this.convert(this.fixKeys(data), 'serialize');
}
if (this.type(entity) === 'object') {
return Object.keys(entity).reduce(function (acc, key) {
//
// If we have an alias, check it and convert it to what we expect in C*
//
var mappedKey = this.entityKeyToColumnName(key);
acc[mappedKey] = entity[key];
return acc;
}.bind(this), {});
convert(data, converter) {
const meta = this.meta;
Object.keys(meta).forEach(function (key) {
if (meta[key][converter]) {
try {
data[key] = meta[key][converter](data[key]);
} catch (e) {
// ignored on purpose
// we should log this invalid data
}
}
});
return data;
}
//
// If we have an array, this is an array of fields for doing "selects"
// Return the primaryKey based on what type it is which is probably an array.
// Handle the other case as well
//
if (Array.isArray(entity)) {
return entity.map(this.fixKeys, this);
primaryKeys() {
return Array.isArray(this._primaryKeys) && this._primaryKeys.length
? this._primaryKeys
: [this._primaryKeys];
}
//
// IDK why this would happen but this is an easy case
//
if (this.type(entity) === 'string') {
var mapped = snakeCase(entity);
return this._aliases[mapped]
? this._aliases[mapped]
: mapped || entity;
secondaryKeys() {
return this._secondaryKeys;
}
fields() {
return this._columnKeys;
}
fieldString(fieldList) {
if (!Array.isArray(fieldList) || !fieldList.length) {
fieldList = this.fields();
}
return fieldList
.map(function (fieldName) {
return fieldName && (`"${fieldName}"`);
})
.join(', ');
}
//
// If we meet 0 conditions we just return what we got, this maybe should be an
// error? Idk, this is just a weird thing in general
// Return all fields, we are going to default to dealing with this as camelCase
//
return entity;
mappedFields() {
if (!this._mappedFields) {
this._mappedFields = this._columnKeys.map(key => {
//
// CamelCase and replace alias with actual key name if it exists
//
return camelCase(this._aliasesReverse[key] || key);
});
}
};
//
// Transform in the opposite direction of transform by remapping snakeCase back
// to camelCase
//
Schema.prototype.toCamelCase = function (entity) {
entity = entity || {};
if (this.type(entity) === 'object') {
return Object.keys(entity).reduce(function (acc, key) {
//
// If we have an alias, check it and convert it to what we
var mappedKey = camelCase(this._aliasesReverse[key] || key);
acc[mappedKey] = entity[key];
return acc;
}.bind(this), {});
return this._mappedFields;
}
//
// If we have an array, this is an array of fields for doing "selects"
// Appropriate typeof checking
//
if (Array.isArray(entity)) {
return entity.map(function (field) {
return camelCase(this._aliasesReverse[field] || field);
}, this);
type(of) {
return Object.prototype.toString.call(of).slice(8, -1).toLowerCase();
}
//
// IDK why this would happen but this is an easy case
// Remark: Create conditions that are meant to be directed at the primary table if there
// is a lookup table situtation. We filter based on the remove logic and do not
// parse into conditionals as this gets passed directly to find
//
if (this.type(entity) === 'string') {
return camelCase(this._aliasesReverse[entity] || entity);
filterPrimaryConditions(conditions) {
return this.toCamelCase(this.filterRemoveConditions(this.fixKeys(conditions)));
}
//
// If we meet 0 conditions we just return what we got, this maybe should be an
// error? Idk, this is just a weird thing in general
// Evaluate if we have sufficient conditions for the remove we are executing and
// return them
//
return entity;
};
createRemoveConditions(conditions, table) {
const transformed = this.fixKeys(conditions);
//
// If we are a lookup table and insufficient conditions are passed to execute
// the queries to ALL the lookup tables, just error for simplicity now.
//
if (!this.sufficientRemoveConditions(transformed)) {
throw new Error('Must pass in all primary keys when using lookup tables');
}
//
// Generate a conditions object given a value assumed to be the primary key
//
Schema.prototype.generateConditions = function (value) {
var self = this;
var primaries = this.primaryKeys();
conditions = this.filterRemoveConditions(transformed, table);
if (primaries.length > 1) {
return new Error(util.format('More conditions required %s', primaries.join(', ')));
const conditionals = this.parseConditions(conditions);
conditionals.table = table;
return conditionals;
}
//
// Return an object with the single primaryKey with the correct case assigned
// to the value passed in. Allows us to support passing a string for findOne
// Evaluate if we have sufficient conditions for the remove we are executing and
// return them
//
return primaries.reduce(function (acc, key) {
acc[self.toCamelCase(key)] = value;
return acc;
}, {});
};
//
// Return both primary and secondary keys
//
Schema.prototype.keys = function () {
return this._keys;
};
//
// Returns whether or not it is a primary or secondary key
//
Schema.prototype.isKey = function (key) {
return !!this._keysLookup[key];
};
//
// Return the column type for the given
//
Schema.prototype.fieldMeta = function (field) {
return this.meta[field];
};
Schema.prototype.prepareForUse = function (data) {
return this.convert(this.fixKeys(data), 'deserialize');
};
// unknown use case
Schema.prototype.prepareForSerialization = function (data) {
return this.convert(this.fixKeys(data), 'serialize');
};
Schema.prototype.convert = function convert(data, converter) {
var meta = this.meta;
Object.keys(meta).forEach(function (key) {
if (meta[key][converter]) {
try {
data[key] = meta[key][converter](data[key]);
} catch (e) {
// ignored on purpose
// we should log this invalid data
}
createUpdateConditions(conditions, table) {
const transformed = this.fixKeys(conditions);
//
// If we are a lookup table and insufficient conditions are passed to execute
// the queries to ALL the lookup tables, just error for simplicity now. Also
// handle the case where we do not have sufficient keys for a query, (need all
// primary keys or both secondary and primary)
//
if (!this.sufficientUpdateConditions(transformed)) {
throw new Error(`All necessary primary keys must be passed in, given: ${JSON.stringify(conditions)}`);
}
});
return data;
};
//
// Return the primaryKey based on what type it is which is probably an array.
// Handle the other case as well
//
Schema.prototype.primaryKeys = function () {
return Array.isArray(this._primaryKeys) && this._primaryKeys.length
? this._primaryKeys
: [this._primaryKeys];
};
conditions = this.filterRemoveConditions(transformed, table);
Schema.prototype.secondaryKeys = function () {
return this._secondaryKeys;
};
const conditionals = this.parseConditions(conditions);
conditionals.table = table;
Schema.prototype.fields = function () {
return this._columnKeys;
};
Schema.prototype.fieldString = function (fieldList) {
if (!Array.isArray(fieldList) || !fieldList.length) {
fieldList = this.fields();
return conditionals;
}
return fieldList
.map(function (fieldName) {
return fieldName && ('"' + fieldName + '"');
})
.join(', ');
};
//
// Return all fields, we are going to default to dealing with this as camelCase
//
Schema.prototype.mappedFields = function () {
if (!this._mappedFields) {
this._mappedFields = this._columnKeys.map(function (key) {
//
// CamelCase and replace alias with actual key name if it exists
//
return camelCase(this._aliasesReverse[key] || key);
}, this);
}
return this._mappedFields;
};
//
// Appropriate typeof checking
//
Schema.prototype.type = function type(of) {
return Object.prototype.toString.call(of).slice(8, -1).toLowerCase();
};
//
// Remark: Create conditions that are meant to be directed at the primary table if there
// is a lookup table situtation. We filter based on the remove logic and do not
// parse into conditionals as this gets passed directly to find
//
Schema.prototype.filterPrimaryConditions = function (conditions) {
return this.toCamelCase(this.filterRemoveConditions(this.fixKeys(conditions)));
};
//
// Evaluate if we have sufficient conditions for the remove we are executing and
// return them
//
Schema.prototype.createRemoveConditions = function (conditions, table) {
var transformed = this.fixKeys(conditions);
//
// If we are a lookup table and insufficient conditions are passed to execute
// the queries to ALL the lookup tables, just error for simplicity now.
// Ensure we have sufficient keys to do an update operation
//
if (!this.sufficientRemoveConditions(transformed)) {
return new Error('Must pass in all primary keys when using lookup tables');
sufficientUpdateConditions(conditions) {
const keys = this.lookups ? this.keys().concat(Object.keys(this.lookupTables)) : this.keys();
return keys.every(function (key) {
return !!conditions[key];
});
}
conditions = this.filterRemoveConditions(transformed, table);
var conditionals = this.parseConditions(conditions);
conditionals.table = table;
return conditionals;
};
//
// Evaluate if we have sufficient conditions for the remove we are executing and
// return them
//
Schema.prototype.createUpdateConditions = function (conditions, table) {
var transformed = this.fixKeys(conditions);
//
// If we are a lookup table and insufficient conditions are passed to execute
// the queries to ALL the lookup tables, just error for simplicity now. Also
// handle the case where we do not have sufficient keys for a query, (need all
// primary keys or both secondary and primary)
// DE-Null the entity, meaning translate known types into our defined null
// equivalents. We expect to receive a fully transformed object with snake case
// keys here. We use a for loop since we do too many iterations over the object
// in this process
//
if (!this.sufficientUpdateConditions(transformed)) {
return new Error(util.format('All necessary primary keys must be passed in, given: %j', conditions));
}
deNull(entity) {
const keys = Object.keys(entity);
for (let i = 0; i < keys.length; i++) {
const key = keys[i];
const value = entity[key];
const meta = this.fieldMeta(key);
if (!meta) {
throw new Error(`${key} is not found in the schema`);
}
conditions = this.filterRemoveConditions(transformed, table);
entity[key] = this.nullToValue(meta, value);
}
var conditionals = this.parseConditions(conditions);
conditionals.table = table;
return entity;
}
return conditionals;
};
hasAllRequiredKeys(entity, previous) {
if (!entity) {
return false;
}
//
// Ensure we have sufficient keys to do an update operation
//
Schema.prototype.sufficientUpdateConditions = function (conditions) {
var keys = this.lookups ? this.keys().concat(Object.keys(this.lookupTables)) : this.keys();
return keys.every(function (key) {
return !!conditions[key];
});
};
//
// DE-Null the entity, meaning translate known types into our defined null
// equivalents. We expect to receive a fully transformed object with snake case
// keys here. We use a for loop since we do too many iterations over the object
// in this process
//
Schema.prototype.deNull = function (entity) {
var keys = Object.keys(entity);
var error;
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
var value = entity[key];
var meta = this.fieldMeta(key);
if (!meta) {
error = new Error(util.format('%s is not found in the schema', key));
break;
try {
this.validate(assign(clone(entity), previous || {}), 'update');
return true;
} catch (err) {
return false;
}
entity[key] = this.nullToValue(meta, value);
}
return error || entity;
};
//
// Adjust detected values that are `null` and map them to a `null-like` value.
// TODO: Should we iterate through maps and sets and adjust accordingly as well?
//
nullToValue(meta, value) {
const type = meta.type;
Schema.prototype.hasAllRequiredKeys = function (entity, previous) {
return entity && this.type(this.validate(assign(clone(entity), previous || {}), 'update')) !== 'error';
};
if ((type === 'text' || type === 'ascii') && value === null) {
// null text values will create tombstones in Cassandra
// We will write a null string instead.
return '\x00';
}
if ((type === 'uuid' || type === 'timeuuid') && isBadUuid(value)) {
// null uuid values will create tombstones in Cassandra
// We will write a zeroed uuid instead.
return this.generateKeysLookup.uuid_empty();
}
if (type === 'timestamp' && value === null) {
// null timestamp values will create tombstones in Cassandra
// We will write a zero time instead.
return new Date(0);
}
if (type === 'map') {
return Object.keys(value).reduce((memo, key) => {
memo[key] = this.nullToValue({ type: meta.mapType[1] }, value[key]);
return memo;
}, {});
}
if (type === 'set') {
// Sets are an odd edge case here, it can be an array or an object who's
// values are sit in an add and/or remove property. This means we need to
// a bit more work updating this data structure.
if (this.type(value) === 'object') {
['add', 'remove'].forEach(method => {
if (method in value) value[method] = value[method].map(value => {
return this.nullToValue({ type: meta.setType }, value);
});
});
//
// detect both empty string and null as a bad uuid value since cassandra will
// give us weird errors if we try and insert an empty string
//
function isBadUuid(value) {
return value === null || (typeof value === 'string' && value.length === 0);
}
//
// Adjust detected values that are `null` and map them to a `null-like` value.
// TODO: Should we iterate through maps and sets and adjust accordingly as well?
//
Schema.prototype.nullToValue = function (meta, value) {
var type = meta.type,
self = this;
if ((type === 'text' || type === 'ascii') && value === null) {
// null text values will create tombstones in Cassandra
// We will write a null string instead.
return '\x00';
}
if ((type === 'uuid' || type === 'timeuuid') && isBadUuid(value)) {
// null uuid values will create tombstones in Cassandra
// We will write a zeroed uuid instead.
return this.generateKeysLookup.uuid_empty();
}
if (type === 'timestamp' && value === null) {
// null timestamp values will create tombstones in Cassandra
// We will write a zero time instead.
return new Date(0);
}
if (type === 'map') {
return Object.keys(value).reduce(function reduce(memo, key) {
memo[key] = self.nullToValue({ type: meta.mapType[1] }, value[key]);
return memo;
}, {});
}
if (type === 'set') {
// Sets are an odd edge case here, it can be an array or an object who's
// values are sit in an add and/or remove property. This means we need to
// a bit more work updating this data structure.
if (this.type(value) === 'object') {
['add', 'remove'].forEach(function each(method) {
if (method in value) value[method] = value[method].map(function map(value) {
return self.nullToValue({ type: meta.setType }, value);
});
return value;
}
return value.map(value => {
return this.nullToValue({ type: meta.setType }, value);
});
return value;
}
return value.map(function map(value) {
return self.nullToValue({ type: meta.setType }, value);
});
if (type === 'list') {
if (this.type(value) === 'object') {
['prepend', 'append', 'remove'].forEach(method => {
if (method in value) value[method] = value[method].map(value => {
return this.nullToValue({ type: meta.listType }, value);
});
});
}
if (type === 'list') {
if (this.type(value) === 'object') {
['prepend', 'append', 'remove'].forEach(function each(method) {
if (method in value) value[method] = value[method].map(function map(value) {
return self.nullToValue({ type: meta.listType }, value);
if (value.index && this.type(value.index) === 'object') {
value.index = Object.keys(value.index).reduce((acc, idx) => {
acc[idx] = this.nullToValue({ type: meta.listType }, value.index[idx]);
return acc;
}, {});
}
} else {
return value.map(value => {
return this.nullToValue({ type: meta.setType }, value);
});
});
if (value.index && this.type(value.index) === 'object') {
value.index = Object.keys(value.index).reduce(function (acc, idx) {
acc[idx] = self.nullToValue({ type: meta.listType }, value.index[idx]);
return acc;
}, {});
}
} else {
return value.map(function map(value) {
return self.nullToValue({ type: meta.setType }, value);
});
}
return value;
}
return value;
//
// RE-Null the entity. This translates the defined null equivalents
// into an actual null value for the consumer to use.
//
reNull(entity) {
const keys = Object.keys(entity);
};
for (let i = 0; i < keys.length; i++) {
const key = keys[i];
if (this.requiresNullConversion(key)) {
entity[key] = this.valueToNull(entity[key]);
} else if (!this.isKey(key)) {
entity[key] = this.nullToValue(this.fieldMeta(key), entity[key]);
}
}
//
// RE-Null the entity. This translates the defined null equivalents
// into an actual null value for the consumer to use.
//
Schema.prototype.reNull = function (entity) {
var keys = Object.keys(entity);
return entity;
}
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
if (this.requiresNullConversion(key)) {
entity[key] = this.valueToNull(entity[key]);
} else if (!this.isKey(key)) {
entity[key] = this.nullToValue(this.fieldMeta(key), entity[key]);
requiresNullConversion(columnName) {
if (!this.meta[columnName]) {
columnName = this.entityKeyToColumnName(columnName);
}
const metaData = this.fieldMeta(columnName);
const isKey = this.isKey(columnName);
if (isKey) { return false; }
if (!metaData || typeof metaData.nullConversion !== 'boolean') { return true; }
return metaData.nullConversion;
}
return entity;
};
Schema.prototype.requiresNullConversion = function (columnName) {
if (!this.meta[columnName]) {
columnName = this.entityKeyToColumnName(columnName);
//
// Detect our `null-like` values and return null if applicable
//
valueToNull(value) {
return valueToNullImpl(value, this.type.bind(this), new WeakSet());
}
var metaData = this.fieldMeta(columnName);
var isKey = this.isKey(columnName);
if (isKey) { return false; }
if (!metaData || typeof metaData.nullConversion !== 'boolean') { return true; }
return metaData.nullConversion;
};
//
// Detect our `null-like` values and return null if applicable
//
Schema.prototype.valueToNull = function (value) {
  //
  // Translate `null-like` sentinel values back into actual null for the
  // consumer. A new WeakSet tracks visited objects so that cyclic data
  // structures do not cause infinite recursion.
  //
  var getType = this.type.bind(this);
  return valueToNullImpl(value, getType, new WeakSet());
};
//
// Assess if we have sufficient conditions during our pre-remove check from
// a table with a lookup table. This will let our user know if they are trying
// to do something they can't do based on how they defined lookup tables
//
sufficientRemoveConditions(conditions) {
const keys = this.lookups
? Object.keys(this.lookupTables).concat(this.primaryKeys())
: this.primaryKeys();
//
// Assess if we have sufficient conditions during our pre-remove check from
// a table with a lookup table. This will let our user know if they are trying
// to do something they can't do based on how they defined lookup tables
//
Schema.prototype.sufficientRemoveConditions = function (conditions) {
var keys = this.lookups
? Object.keys(this.lookupTables).concat(this.primaryKeys())
: this.primaryKeys();
return keys.every(function (key) {
return !!conditions[key];
});
}
return keys.every(function (key) {
return !!conditions[key];
});
};
//
//
// These are conditions specific for the remove functionality in the case where
// we are removing from a bunch of lookup tables. Also handles the generic case
//
filterRemoveConditions(conditions, table) {
//
// Filter the conditions and pluck the appropriate primary key and secondary
// keys based on the table
//
return Object.keys(conditions)
.filter(key => {
//
// Only allow secondary keys or the appropriate primary key. If a table is
// passed, we check the lookup table keys as well
//
return (table
? this._reverseLookupKeyMap[table] === key
: this._primaryKeysLookup[key])
|| this._secondaryKeysLookup[key];
})
.reduce(function (acc, key) {
acc[key] = conditions[key];
return acc;
}, {});
}
//
//
// These are conditions specific for the remove functionality in the case where
// we are removing from a bunch of lookup tables. Also handles the generic case
//
Schema.prototype.filterRemoveConditions = function (conditions, table) {
//
// Filter the conditions and pluck the appropriate primary key and secondary
// keys based on the table
// Remark: Transform the keys and then filter out any keys that are not the
// primary/secondary keys that are used as conditions to query on (creating
// the where clause)
//
return Object.keys(conditions)
.filter(function (key) {
//
// Only allow secondary keys or the appropriate primary key. If a table is
// passed, we check the lookup table keys as well
//
return (table
? this._reverseLookupKeyMap[table] === key
: this._primaryKeysLookup[key])
|| this._secondaryKeysLookup[key];
}, this)
.reduce(function (acc, key) {
acc[key] = conditions[key];
return acc;
}, {});
};
filterConditions(conditions) {
let table;
const primaries = [];
//
// Remark: Transform the keys and then filter out any keys that are not the
// primary/secondary keys that are used as conditions to query on (creating
// the where clause)
//
Schema.prototype.filterConditions = function (conditions) {
var table;
var primaries = [];
const filtered = Object.keys(conditions)
.filter(key => {
//
// If it exists as a primary or secondary key, we keep it and dont filter
//
const exists = !!this._keysLookup[key];
if (this._primaryKeysLookup[key]) primaries.push(key);
//
// Check if its part of a lookup table
//
table = this.lookupTables[key];
if (table) primaries.push(key);
var filtered = Object.keys(conditions)
.filter(function (key) {
//
// If it exists as a primary or secondary key, we keep it and dont filter
//
var exists = !!this._keysLookup[key];
if (this._primaryKeysLookup[key]) primaries.push(key);
//
// Check if its part of a lookup table
//
table = this.lookupTables[key];
if (table) primaries.push(key);
return exists || !!table;
})
.reduce(function (acc, key) {
acc[key] = conditions[key];
return acc;
}, {});
return exists || !!table;
}, this)
.reduce(function (acc, key) {
acc[key] = conditions[key];
return acc;
}, {});
//
// Return an error if there are more than one primary key being used,
// meaning we have conflicting lookup tables. Technically we dont need to
// error, we just filter out or delete one of the keys from the filtered
// object
//
if (primaries.length > 1 && !this.compositePrimary) {
throw new Error(`There can only be 1 primary key in a query, found ${primaries.length} ${primaries}`);
}
//
// Return an error if there are more than one primary key being used,
// meaning we have conflicting lookup tables. Technically we dont need to
// error, we just filter out or delete one of the keys from the filtered
// object
//
if (primaries.length > 1 && !this.compositePrimary) {
return new Error('There can only be 1 primary key in a query, found ' + primaries.length + ' ' + primaries);
return { table: table, conditions: filtered };
}
return { table: table, conditions: filtered };
};
//
// Create conditions based on an entity or conditions. Optional type paremeter
// can be passed as there is one case we don't want lookup table primary keys to be
// considered valid conditions (remove);
//
Schema.prototype.createConditions = function (conditions) {
var opts = this.filterConditions(this.fixKeys(conditions));
//
// We can error if we try and specify 2 keys which are for conflicting lookup
// tables. We can only query one
// Create conditions based on an entity or conditions. Optional type paremeter
// can be passed as there is one case we don't want lookup table primary keys to be
// considered valid conditions (remove);
//
if (this.type(opts) === 'error') {
return opts;
createConditions(conditions) {
const opts = this.filterConditions(this.fixKeys(conditions));
const conditionals = this.parseConditions(opts.conditions);
//
// Pass back the table so we can override the standard table after we have
// parsed the conditions
//
conditionals.table = opts.table;
return conditionals;
}
var conditionals = this.parseConditions(opts.conditions);
//
// Pass back the table so we can override the standard table after we have
// parsed the conditions
// Parse the conditions into array objects to be used later on
//
conditionals.table = opts.table;
return conditionals;
};
//
// Parse the conditions into array objects to be used later on
//
Schema.prototype.parseConditions = function (conditions) {
//
// Create a data structure
//
var conditionals = {
parseConditions(conditions) {
//
// The keys that get mapped into the where clause
// Create a data structure
//
query: [],
const conditionals = {
//
// The keys that get mapped into the where clause
//
query: [],
//
// Hints based on parameters
//
hints: [],
//
// Actual parameter values associated with the query
//
params: [],
//
// Special routing indexes for parameters that are primaryKeys
//
routingIndexes: [],
//
// A simple array of field names (i.e. key names) for
// bookkeeping / logging purposes.
//
fields: []
};
//
// Hints based on parameters
// Create an array of `where` objects which have a `query` and `param`
// property as well as the original `field` and `value` i guess?
//
hints: [],
//
// Actual parameter values associated with the query
//
params: [],
//
// Special routing indexes for parameters that are primaryKeys
//
routingIndexes: [],
//
// A simple array of field names (i.e. key names) for
// bookkeeping / logging purposes.
//
fields: []
};
Object.keys(conditions).forEach(field => {
const value = conditions[field];
conditionals.fields.push(field);
conditionals.query.push(this._getQuery(field, value));
//
// Do valueOf on the params to get the value expected by priam.
// Whats returned by this._getParams is actually the proper value for the
// query
//
let params = this._getParams(field, value);
params = Array.isArray(params) ? params : [params];
params.forEach(function (param) {
conditionals.params.push(this.valueOf(field, param));
}, this);
});
return conditionals;
}
//
// Create an array of `where` objects which have a `query` and `param`
// property as well as the original `field` and `value` i guess?
// Return the params based on the given entity
//
Object.keys(conditions).forEach(function (field) {
var value = conditions[field];
conditionals.fields.push(field);
conditionals.query.push(this._getQuery(field, value));
getValues(entity, fields) {
fields = fields || this.fields();
//
// Do valueOf on the params to get the value expected by priam.
// Whats returned by this._getParams is actually the proper value for the
// query
// Populate all fields (i.e. columns) with
// any values from the entity. If a value for
// a particular column is not present we set
// it EXPLICITLY to `null`.
//
var params = this._getParams(field, value);
params = Array.isArray(params) ? params : [params];
params.forEach(function (param) {
conditionals.params.push(this.valueOf(field, param));
}, this);
return fields.map(field => {
let value = null;
if (Object.prototype.hasOwnProperty.call(entity, field)) {
value = entity[field];
}
}, this);
return this.valueOf(field, value);
});
}
return conditionals;
};
//
// Bit of a hack that returns the data structure expected by priam
//
valueOf(field, value, type) {
return {
value: value,
hint: this._mapFieldHint(
type ? type : this._getFieldHint(field)
),
isRoutingKey: this.primaryKeys().indexOf(field) !== -1
};
}
//
// Return the params based on the given entity
//
Schema.prototype.getValues = function (entity, fields) {
fields = fields || this.fields();
//
// Populate all fields (i.e. columns) with
// any values from the entity. If a value for
// a particular column is not present we set
// it EXPLICITLY to `null`.
// Add the column names and aliases from the schema definition as
// property getters/setters for the data being modeled by this object
//
return fields.map(function (field) {
var value = null;
if (entity.hasOwnProperty(field)) {
value = entity[field];
}
buildProperties() {
const columns = Object.keys(this.meta);
const aliasesOf = this._aliasesReverse;
return this.valueOf(field, value);
}, this);
};
const definitions = columns.reduce(function (memo, name) {
name = camelCase(aliasesOf[name] || name);
memo[name] = {
get() {
return this.attributes.get(name);
},
set(value) {
return this.attributes.set(name, value);
},
enumerable: true,
configurable: true
};
return memo;
}, {});
//
// Bit of a hack that returns the data structure expected by priam
//
Schema.prototype.valueOf = function (field, value, type) {
  //
  // Wrap a value in the { value, hint, isRoutingKey } structure that priam
  // expects; `type`, when provided, overrides the schema-derived field hint.
  //
  var hint = this._mapFieldHint(type ? type : this._getFieldHint(field));
  return {
    value: value,
    hint: hint,
    isRoutingKey: this.primaryKeys().indexOf(field) !== -1
  };
};
return definitions;
}
//
// Add the column names and aliases from the schema definition as
// property getters/setters for the data being modeled by this object
//
Schema.prototype.buildProperties = function () {
var columns = Object.keys(this.meta);
var aliasesOf = this._aliasesReverse;
//
//
_getQuery(field, values) {
let value;
if (Array.isArray(values)) {
if (values.length > 1) {
return `${field} IN (?${new Array(values.length).join(', ?')})`;
}
value = values[0];
} else if (this.type(values) === 'object') {
value = Object.keys(values)
.map(name => {
const op = this.operators[name];
var definitions = columns.reduce(function (memo, name) {
name = camelCase(aliasesOf[name] || name);
memo[name] = {
get: function () {
return this.attributes.get(name);
},
set: function (value) {
return this.attributes.set(name, value);
},
enumerable: true,
configurable: true
};
return memo;
}, {});
return op
? `${field} ${op} ?`
: null;
})
.filter(Boolean)
.join(' AND ');
return definitions;
};
//
//
Schema.prototype._getQuery = function (field, values) {
var value;
if (Array.isArray(values)) {
if (values.length > 1) {
return util.format('%s IN (%s)', field, '?' + new Array(values.length).join(', ?'));
return value || null;
} else {
value = values;
}
value = values[0];
} else if (this.type(values) === 'object') {
value = Object.keys(values)
.map(function (name) {
var op = this.operators[name];
return op
? util.format('%s %s ?', field, op)
: null;
}, this)
.filter(Boolean)
.join(' AND ');
return value || null;
} else {
value = values;
return this.type(value) === 'string' || this.type(value) === 'number'
? `${field} = ?`
: null;
}
return this.type(value) === 'string' || this.type(value) === 'number'
? util.format('%s = ?', field)
: null;
};
//
// Transform parameters based on the field passed in and the value associated
// with the field
//
_getParams(field, values) {
let value;
//
// Transform parameters based on the field passed in and the value associated
// with the field
//
Schema.prototype._getParams = function (field, values) {
var value;
if (Array.isArray(values)) {
values = values.slice(0);
if (values.length > 1) {
return values;
}
value = values[0];
} else if (this.type(values) === 'object') {
value = Object.keys(values)
.map(function (name) {
const op = this.operators[name];
if (!op) {
return null;
}
if (Array.isArray(values)) {
values = values.slice(0);
if (values.length > 1) {
return values;
const type = this.meta[field].type;
return convertRangeType(this.cqlFunctions[type], values[name], name);
}, this)
.filter(Boolean);
if (value.length) {
return value;
}
} else {
value = values;
}
value = values[0];
} else if (this.type(values) === 'object') {
value = Object.keys(values)
.map(function (name) {
var op = this.operators[name],
type = this.meta[field].type;
if (!op) {
return null;
}
return this.type(value) === 'string' || this.type(value) === 'number' ? value : null;
}
return convertRangeType(this.cqlFunctions[type], values[name], name);
}, this)
.filter(Boolean);
if (value.length) {
return value;
}
} else {
value = values;
//
// Get the proper hint code from the internal cassandra driver to pass in
//
_mapFieldHint(hint) {
const hintType = dataTypes[hint] ? dataTypes[hint] : hint;
return this.type(hintType) === 'string'
? dataTypes.getByName(hintType)
: hintType;
}
return this.type(value) === 'string' || this.type(value) === 'number' ? value : null;
};
_getFieldHint(field) {
const meta = this.meta[field];
if (!meta || !this._isString(meta.type)) return null;
//
// Get the proper hint code from the internal cassandra driver to pass in
//
Schema.prototype._mapFieldHint = function (hint) {
  //
  // Translate a hint into the code the cassandra driver expects. Known
  // names resolve through dataTypes; a value that is still a string is
  // resolved by name, anything else is passed through unchanged.
  //
  var hintType = dataTypes[hint] ? dataTypes[hint] : hint;
  if (this.type(hintType) !== 'string') return hintType;
  return dataTypes.getByName(hintType);
};
//
// Validate and return hints for various types
//
if (meta.type === 'map') {
return Array.isArray(meta.mapType)
&& meta.mapType.length === 2
&& meta.mapType.every(this._isString, this)
? `map<${meta.mapType[0]},${meta.mapType[1]}>`
: null;
}
Schema.prototype._getFieldHint = function (field) {
var meta = this.meta[field];
var cType;
//
// Handle set and lists which are formatted the same
//
if (['set', 'list'].indexOf(meta.type) !== -1) {
const cType = meta[`${meta.type}Type`];
return this._isString(cType)
? `${meta.type}<${cType}>`
: null;
}
if (!meta || !this._isString(meta.type)) return null;
//
// Validate and return hints for various types
//
if (meta.type === 'map') {
return Array.isArray(meta.mapType)
&& meta.mapType.length === 2
&& meta.mapType.every(this._isString, this)
? util.format('map<%s,%s>', meta.mapType[0], meta.mapType[1])
: null;
return meta.type;
}
//
// Handle set and lists which are formatted the same
// Helper function for the above
//
if (['set', 'list'].indexOf(meta.type) !== -1) {
cType = meta[meta.type + 'Type'];
return this._isString(cType)
? util.format('%s<%s>', meta.type, cType)
: null;
_isString(type) {
return this.type(type) === 'string';
}
return meta.type;
};
}
//
// Helper function for the above
// detect both empty string and null as a bad uuid value since cassandra will
// give us weird errors if we try and insert an empty string
//
Schema.prototype._isString = function (type) {
  // Helper used by _getFieldHint to validate type descriptors.
  var detected = this.type(type);
  return detected === 'string';
};
//
// Treat both null and the empty string as an invalid uuid value, since
// cassandra gives us confusing errors if we try to insert an empty string.
//
function isBadUuid(value) {
  if (value === null) return true;
  return typeof value === 'string' && value.length === 0;
}

@@ -1090,3 +1084,3 @@ /*

return function (timeuuid) {
var precision = TimeUuid.fromString(timeuuid).getDatePrecision();
const precision = TimeUuid.fromString(timeuuid).getDatePrecision();
return TimeUuid[type](precision.date, precision.ticks);

@@ -1101,3 +1095,3 @@ };

return function () {
var value;
let value;
switch (type) {

@@ -1113,3 +1107,3 @@ case 'uuid_v4':

break;
default :
default:
break;

@@ -1134,3 +1128,3 @@ }

var type = getType(value);
const type = getType(value);

@@ -1154,3 +1148,3 @@ if (type === 'date' && value.getTime() === 0) {

for (let i = 0; i < value.length; i++) {
var arrValue = value[i];
const arrValue = value[i];
if (!isObject(arrValue) || !visited.has(arrValue)) {

@@ -1161,5 +1155,5 @@ value[i] = valueToNullImpl(arrValue, getType, visited);

} else if (type === 'object') {
var keys = Object.keys(value);
const keys = Object.keys(value);
for (let i = 0; i < keys.length; i++) {
var keyValue = value[keys[i]];
const keyValue = value[keys[i]];
if (!isObject(keyValue) || !visited.has(keyValue)) {

@@ -1178,1 +1172,3 @@ value[keys[i]] = valueToNullImpl(keyValue, getType, visited);

}
module.exports = Schema;

@@ -1,3 +0,3 @@

var memoize = require('./memoize').memoize1;
const memoize = require('./memoize').memoize1;
module.exports = memoize(require('to-snake-case'));

@@ -0,6 +1,4 @@

const statements = require('./statements');
const CompoundStatement = require('./compound-statement');
var statements = require('./statements');
var CompoundStatement = require('./compound-statement');
/*

@@ -15,3 +13,3 @@ * Constructor function for the StatementBuilder responsible for creating

*/
var StatementBuilder = module.exports = function StatementBuilder(schema, options) {
const StatementBuilder = module.exports = function StatementBuilder(schema, options) {
this.schema = schema;

@@ -41,10 +39,4 @@ this.options = options || {};

//
var statement = new statements[action](this.schema);
var opts = statement.init(options, entity);
//
// Handle any errors in the init process, otherwise build it
//
if (this.typeOf(opts) === 'error') {
return opts;
}
const statement = new statements[action](this.schema);
const opts = statement.init(options, entity);

@@ -62,3 +54,3 @@ //

//
var compound = new CompoundStatement(this.schema);
const compound = new CompoundStatement(this.schema);
compound.add(statement.build(opts));

@@ -70,12 +62,11 @@ //

//
var lookupMap = this.schema.lookupTables;
var error;
var keys = Object.keys(lookupMap);
const lookupMap = this.schema.lookupTables;
const keys = Object.keys(lookupMap);
for (var i = 0; i < keys.length; i++) {
var table = lookupMap[keys[i]];
for (let i = 0; i < keys.length; i++) {
const table = lookupMap[keys[i]];
//
// Clone the initialized options and add the new table to them
//
var stmnt = new statements[action](this.schema);
const stmnt = new statements[action](this.schema);
//

@@ -91,8 +82,3 @@ // Set table for both steps for the various statements. We does this

//
var op = stmnt.init(options, entity);
if (this.typeOf(op) === 'error') {
error = op;
break;
}
const op = stmnt.init(options, entity);
op.table = table;

@@ -105,3 +91,3 @@ compound.add(stmnt.build(op));

//
return error ? error : compound;
return compound;
};

@@ -108,0 +94,0 @@ });

@@ -1,5 +0,5 @@

var snakeCase = require('../../snake-case');
var util = require('util');
const snakeCase = require('../../snake-case');
const util = require('util');
var specialActionMap = {
const specialActionMap = {
orderBy: {

@@ -10,3 +10,2 @@ cql: 'CLUSTERING ORDER BY (%s'

module.exports = With;
//

@@ -19,106 +18,106 @@ // What is a partial-statement? We are going to assume its a simple string

function With(opts) {
if (!(this instanceof With))
return new With(opts);
class With {
constructor(opts) {
if (!(this instanceof With))
return new With(opts);
this.cql = 'WITH ';
this.error = null;
this.cql = 'WITH ';
this.error = null;
var result = this.process(opts);
if (type(result) !== 'error') {
this.cql += result;
} else {
this.error = result;
try {
const result = this.process(opts);
this.cql += result;
} catch (err) {
this.error = err;
}
}
}
//
// Lets assume we have a set of actions to do for
//
// An example of what we expect to receive here.
// We handle each data structure differently because we attempt to output it as
// a string representation wrapped in the proper quotes for the text.
// {
// compaction: { /*object of compaction options*/ },
// gcGraceSeconds: 9680
// }
//
// and the return value here
// WITH compaction = {
// 'some_setting': 'someValue'
// } AND gc_grace_seconds = 9680;
//
With.prototype.process = function process(opts) {
var error;
var string = Object.keys(opts)
.map(function (action) {
var args = opts[action];
var executed = snakeCase(action);
var typeArg = type(args);
//
// Figure out what to do based on the type of args
// and the action
//
switch (typeArg) {
case 'object':
//
// Lets assume we have a set of actions to do for
//
// An example of what we expect to receive here.
// We handle each data structure differently because we attempt to output it as
// a string representation wrapped in the proper quotes for the text.
// {
// compaction: { /*object of compaction options*/ },
// gcGraceSeconds: 9680
// }
//
// and the return value here
// WITH compaction = {
// 'some_setting': 'someValue'
// } AND gc_grace_seconds = 9680;
//
process(opts) {
const string = Object.keys(opts)
.map(function (action) {
const args = opts[action];
const executed = snakeCase(action);
const typeArg = type(args);
//
// Figure out what to do based on the type of args
// and the action
//
switch (typeArg) {
case 'object':
//
// Special cases so we can be generic with this statement
//
if (specialActionMap[action])
return this[action](args, specialActionMap[action]);
//
// Remark: Convert the object representation into a string
// This is currently used for compaction as an example
//
return `${executed} = ${this[typeArg](args)}`;
//
// Special cases so we can be generic with this statement
// Wrap quotes around the string types
//
if (specialActionMap[action])
return this[action](args, specialActionMap[action]);
//
// Remark: Convert the object representation into a string
// This is currently used for compaction as an example
//
return executed + ' = ' + this[typeArg](args);
case 'string':
return `${executed} = '${args}'`;
case 'number':
return `${executed} = ${args}`;
default:
throw new Error(
`Cannot create with statement with ${typeArg} ${args}`
);
}
//
// Wrap quotes around the string types
// This might not be the only separator for these types of commands so
// this might need more variability
//
case 'string':
return executed + " = '" + args + "'";
case 'number':
return executed + ' = ' + args;
default:
error = new Error(
util.format('Cannot create with statement with %s %s', typeArg, args)
);
}
//
// This might not be the only separator for these types of commands so
// this might need more variability
//
}, this).join(' AND ');
}, this).join(' AND ');
return error ? error : string;
};
return string;
}
//
// Handle turning an object into a string for certain configuration
//
//
// Render a plain configuration object as a CQL map literal string (used for
// e.g. compaction options). Keys are converted to snake_case; every entry
// after the first is prefixed with a comma so the newline-joined lines form
// a valid map body.
//
With.prototype.object = function object(mapping) {
  var lines = [];
  var keys = Object.keys(mapping);
  for (var i = 0; i < keys.length; i++) {
    var prefix = i === 0 ? ' ' : ' ,';
    lines.push(prefix + util.format(" '%s' : '%s'", snakeCase(keys[i]), mapping[keys[i]]));
  }
  return '{ \n' + lines.join('\n') + ' }';
};
//
// Handle turning an object into a string for certain configuration
//
object(mapping) {
return '{ \n' + Object.keys(mapping).map(function (key, i) {
//
// Check if we have to prefix with a comma to build the json object
//
const sub = i !== 0 ? ' ,' : ' ';
//
// Translate any keys to snake_case because thats what seems
// reasonable
//
return `${sub} '${snakeCase(key)}' : '${mapping[key]}'`;
}).join('\n') + ' }';
}
//
// Handle specific orderBy syntax
//
//
// Build the CLUSTERING ORDER BY clause: fill the template in opts.cql with
// the key, append the optional sort direction, then close the parenthesis
// the template left open.
//
With.prototype.orderBy = function orderBy(args, opts) {
  var cql = util.format(opts.cql, args.key);
  if (args.order) cql += ' ' + args.order;
  return cql + ')';
};
//
// Handle specific orderBy syntax
//
orderBy(args, opts) {
let cql = util.format(opts.cql, args.key);
if (args.order) cql += ` ${args.order}`;
cql += ')';
return cql;
}
}

@@ -128,1 +127,3 @@ function type(of) {

}
module.exports = With;

@@ -1,4 +0,2 @@

var util = require('util'),
const
With = require('../partial-statements/with'),

@@ -11,78 +9,79 @@ Statement = require('../statement');

//
var AlterStatement = module.exports = function () {
Statement.apply(this, arguments);
class AlterStatement extends Statement {
constructor(...args) {
super(...args);
this.types = ['TABLE'];
this.types = ['TABLE'];
this.typesLookup = this.types.reduce(function (acc, type) {
acc[type] = true;
return acc;
}, {});
};
util.inherits(AlterStatement, Statement);
//
// Remark: this returns the options passed into build
//
AlterStatement.prototype._init = function (options) {
var opts = {};
var w;
var actions = options.alter || options.actions || options.with || {};
opts.type = options.type && options.type.toUpperCase();
opts.table = options.table;
//
// Simple validation on type of alter statement
//
if (!opts.type || !this.typesLookup[opts.type]) {
return new Error('Invalid type ' + opts.type);
this.typesLookup = this.types.reduce(function (acc, type) {
acc[type] = true;
return acc;
}, {});
}
//
// Since the partial statement can error, we generate it in the init step
// and use it later
// Remark: this returns the options passed into build
//
if (actions && Object.keys(actions).length) {
w = new With(actions);
if (w.error) return w.error;
opts.with = w.cql;
}
_init(options) {
const opts = {};
return opts;
const actions = options.alter || options.actions || options.with || {};
opts.type = options.type && options.type.toUpperCase();
opts.table = options.table;
//
// Simple validation on type of alter statement
//
if (!opts.type || !this.typesLookup[opts.type]) {
throw new Error(`Invalid type ${opts.type}`);
}
};
//
// Since the partial statement can error, we generate it in the init step
// and use it later
//
if (actions && Object.keys(actions).length) {
const w = new With(actions);
if (w.error) throw w.error;
opts.with = w.cql;
}
AlterStatement.prototype.build = function (options) {
//
// Remark: Uppercase the type for the CQL. We might want to do some validation
// here on type (we should actually do that in statement-builder.
//
var type = options.type;
return opts;
}
this.cql += 'ALTER ' + type + ' ';
build(options) {
//
// Remark: Uppercase the type for the CQL. We might want to do some validation
// here on type (we should actually do that in statement-builder.
//
const type = options.type;
//
// switch on the `type` to determine what kind of alteration we are doing.
// This appends the specific alter command to the statement cql
//
switch (type) {
case 'TABLE':
this.xTable(options);
break;
default :
break;
this.cql += `ALTER ${type} `;
//
// switch on the `type` to determine what kind of alteration we are doing.
// This appends the specific alter command to the statement cql
//
switch (type) {
case 'TABLE':
this.xTable(options);
break;
default :
break;
}
return this;
}
return this;
};
AlterStatement.prototype.xTable = function (opts) {
var table = opts.table || this.table;
xTable(opts) {
const table = opts.table || this.table;
this.cql += table + ' ';
this.cql += `${table} `;
if (opts.with) {
this.cql += opts.with;
if (opts.with) {
this.cql += opts.with;
}
return this;
}
}
return this;
};
module.exports = AlterStatement;

@@ -1,56 +0,40 @@

var util = require('util');
var Statement = require('../statement');
const Statement = require('../statement');
var CreateStatement = module.exports = function () {
Statement.apply(this, arguments);
};
class CreateStatement extends Statement {
_init(options, entity) {
const opts = {};
const ret = this.schema.validate(this.schema.fixKeys(entity), 'create');
opts.entity = this.schema.deNull(ret);
util.inherits(CreateStatement, Statement);
//
// Allow ttl to be passed into an insert
//
if (options.ttl) opts.ttl = options.ttl;
CreateStatement.prototype._init = function (options, entity) {
var opts = {};
var ret = this.schema.validate(this.schema.fixKeys(entity), 'create');
if (this.typeOf(ret) === 'error') {
return ret;
return opts;
}
opts.entity = this.schema.deNull(ret);
build(options) {
const allFields = this.schema.fields();
const entity = options.entity;
//
// Handle lookup table writes.
//
const table = options.table || this.table;
if (this.typeOf(opts.entity) === 'error') {
return opts.entity;
}
const placeholders = new Array(allFields.length).join(', ?');
const ttlClause = options.ttl ? ` USING TTL ${options.ttl}` : '';
this.cql = `INSERT INTO ${table} (${allFields.join(', ')}) VALUES (?${placeholders})${ttlClause};`;
//
// Allow ttl to be passed into an insert
//
if (options.ttl) opts.ttl = options.ttl;
this.options = { executeAsPrepared: true, queryName: `insert-${this.name}` };
//
// Remark: This could be preparsed and put on options so we wouldn't have to know
// about the schema
//
this.params = this.schema.getValues(entity);
return opts;
};
return this;
}
}
CreateStatement.prototype.build = function (options) {
var allFields = this.schema.fields();
var entity = options.entity;
//
// Handle lookup table writes.
//
var table = options.table || this.table;
this.cql = util.format(
'INSERT INTO %s (%s) VALUES (?%s)%s;',
table,
allFields.join(', '),
new Array(allFields.length).join(', ?'),
// conditionally add ttl if it exists
options.ttl ? util.format(' USING TTL %d', options.ttl) : ''
);
this.options = { executeAsPrepared: true, queryName: 'insert-' + this.name };
//
// Remark: This could be preparsed and put on options so we wouldn't have to know
// about the schema
//
this.params = this.schema.getValues(entity);
return this;
};
module.exports = CreateStatement;

@@ -0,112 +1,96 @@

const Statement = require('../statement');
class FindStatement extends Statement {
_init(options, entity) {
const opts = {};
var util = require('util');
var Statement = require('../statement');
//
// We assess the length of conditions before and after. We ONLY want to end up
// doing a SELECT * from TABLE if we pass in zero conditions.
//
const conditions = options.conditions || entity;
const condLength = Object.keys(conditions).length;
//
// Parse the conditions into the intended data structure we need. See schema
// code
//
opts.conditionals = this.schema.createConditions(conditions);
//
// Inspect the query length and see if we intended on passing anything with
// a zero length, if we did not, we should error.
//
if (!Object.keys(opts.conditionals.query).length && condLength) {
throw new Error(`Insufficient conditions for find, ${JSON.stringify(conditions)}`);
}
opts.type = options.type;
//
// Transform any fields that were passed in
//
opts.fields = this.schema.fixKeys(options.fields || []);
var FindStatement = module.exports = function () {
Statement.apply(this, arguments);
};
opts.limit = options.limit;
opts.allowFiltering = options.allowFiltering;
util.inherits(FindStatement, Statement);
FindStatement.prototype._init = function (options, entity) {
var opts = {};
//
// We assess the length of conditions before and after. We ONLY want to end up
// doing a SELECT * from TABLE if we pass in zero conditions.
//
var conditions = options.conditions || entity;
var condLength = Object.keys(conditions).length;
//
// Parse the conditions into the intended data structure we need. See schema
// code
//
opts.conditionals = this.schema.createConditions(conditions);
if (this.typeOf(opts.conditionals) === 'error') {
return opts.conditionals;
return opts;
}
//
// Inspect the query length and see if we intended on passing anything with
// a zero length, if we did not, we should error.
//
if (!Object.keys(opts.conditionals.query).length && condLength) {
return new Error(util.format('Insufficient conditions for find, %j', conditions));
}
opts.type = options.type;
//
// Transform any fields that were passed in
//
opts.fields = this.schema.fixKeys(options.fields || []);
opts.limit = options.limit;
opts.allowFiltering = options.allowFiltering;
build({ conditionals, fields, limit, type, allowFiltering }) {
//
// We default to the table set on conditionals if we are doing a find on
// a lookup table. We establish this when we create conditions so it kind of
// makes sense.
//
const table = conditionals.table || this.table;
return opts;
};
const fieldsCql = (type === 'count')
? 'COUNT(*)'
: this.schema.fieldString(fields);
FindStatement.prototype.build = function (options) {
var conditionals = options.conditionals;
var fields = options.fields;
var limit = options.limit;
var type = options.type;
this.cql = `SELECT ${fieldsCql} FROM ${table}`;
this.name += `${fields.sort().join('-') || type}-from-${table}`;
//
// We default to the table set on conditionals if we are doing a find on
// a lookup table. We establish this when we create conditions so it kind of
// makes sense.
//
var table = conditionals.table || this.table;
if (conditionals.query && conditionals.query.length) {
this.cql += ` WHERE ${conditionals.query.join(' AND ')}`;
this.name += `-by-${conditionals.fields.sort().join('-')}`;
}
var fieldsCql;
if (type === 'count') {
fieldsCql = 'COUNT(*)';
} else {
fieldsCql = this.schema.fieldString(fields);
}
//
// Limit the query
//
if (typeof limit === 'number' && limit > 0) {
this.cql += ` LIMIT ${limit}`;
this.name += `-limit-${limit}`;
}
this.cql = util.format('SELECT %s FROM %s', fieldsCql, table);
this.name += (fields.sort().join('-') || type) + '-from-' + table;
if (conditionals.query && conditionals.query.length) {
this.cql += util.format(' WHERE %s', conditionals.query.join(' AND '));
this.name += '-by-' + conditionals.fields.sort().join('-');
}
//
// Limit the query
//
if (typeof limit === 'number' && limit > 0) {
this.cql += util.format(' LIMIT %s', limit);
this.name += util.format('-limit-%s', limit);
}
if (options.allowFiltering) {
this.cql += ' ALLOW FILTERING';
this.name += '-allow-filtering';
}
//
// This should ideally be configurable
//
this.options = {
executeAsPrepared: true,
queryName: this.name,
if (allowFiltering) {
this.cql += ' ALLOW FILTERING';
this.name += '-allow-filtering';
}
//
// For streaming and large queries
// This should ideally be configurable
//
autoPage: true
};
this.params = conditionals.params;
this.options = {
executeAsPrepared: true,
queryName: this.name,
//
// For streaming and large queries
//
autoPage: true
};
this.params = conditionals.params;
if (type === 'first' || type === 'count') {
this.mutate = function (query) {
return query.first();
};
} else if (type === 'one') {
this.mutate = function (query) {
return query.single();
};
if (type === 'first' || type === 'count') {
this.mutate = function (query) {
return query.first();
};
} else if (type === 'one') {
this.mutate = function (query) {
return query.single();
};
}
return this;
}
}
return this;
};
module.exports = FindStatement;

@@ -0,52 +1,43 @@

const Statement = require('../statement');
class RemoveStatement extends Statement {
_init(options, entity) {
const opts = {};
var util = require('util');
var Statement = require('../statement');
const conditions = options.conditions || entity;
var RemoveStatement = module.exports = function () {
Statement.apply(this, arguments);
};
opts.conditionals = this.schema.createRemoveConditions(conditions, options.table);
util.inherits(RemoveStatement, Statement);
if (!Object.keys(opts.conditionals.query).length) {
throw new Error(`Insufficient conditions to remove ${JSON.stringify(conditions)}`);
}
RemoveStatement.prototype._init = function (options, entity) {
var opts = {};
return opts;
}
var conditions = options.conditions || entity;
build(options) {
const conditionals = options.conditionals;
//
// Handle lookup table deletes by being able to pass in the table;
// Public API is not allowed to do this
//
const table = conditionals.table || this.table;
opts.conditionals = this.schema.createRemoveConditions(conditions, options.table);
//
// THe actual CQL
//
this.cql = `DELETE FROM ${table}`;
this.cql += ` WHERE ${conditionals.query.join(' AND ')}`;
if (this.typeOf(opts.conditionals) === 'error') {
return opts.conditionals;
}
//
// Name of the query to pass to priam
//
this.name += `remove-${table}${conditionals.fields.sort().join('-')}`;
this.options = { executeAsPrepared: true, queryName: this.name };
this.params = conditionals.params;
if (!Object.keys(opts.conditionals.query).length) {
return new Error(util.format('Insufficient conditions to remove %j', conditions));
return this;
}
}
return opts;
};
RemoveStatement.prototype.build = function (options) {
var conditionals = options.conditionals;
//
// Handle lookup table deletes by being able to pass in the table;
// Public API is not allowed to do this
//
var table = conditionals.table || this.table;
//
// THe actual CQL
//
this.cql = util.format('DELETE FROM %s', table);
this.cql += util.format(' WHERE %s', conditionals.query.join(' AND '));
//
// Name of the query to pass to priam
//
this.name += 'remove-' + table + conditionals.fields.sort().join('-');
this.options = { executeAsPrepared: true, queryName: this.name };
this.params = conditionals.params;
return this;
};
module.exports = RemoveStatement;
/* eslint no-process-env: 0 */
const clone = require('clone');
const Statement = require('../statement');
const With = require('../partial-statements/with');
var util = require('util');
var clone = require('clone');
var Statement = require('../statement');
var With = require('../partial-statements/with');
class TableStatement extends Statement {
_init(options) {
const alter = options.alter || options.with || {};
const opts = {};
var TableStatement = module.exports = function () {
Statement.apply(this, arguments);
};
//
// We want to converge on everything being under alter/with option
//
if (options.orderBy || alter.orderBy) {
//
// Currently this is expected to be an object with properties
// { key: 'createdAt', order: 'ascending' }
//
const orderBy = clone(options.orderBy || alter.orderBy);
//
// Map it to the correct string if passed in correctly if exists (its ok for
// this to be undefined).
//
orderBy.order = this.schema.orderMap[orderBy.order && orderBy.order.toLowerCase()];
//
// Test if the key exists and returns the transformed key to use if it does,
// otherwise returns undefined.
//
orderBy.key = this.schema.exists(orderBy.key);
if (!this.schema.exists(orderBy.key)) {
throw new Error(`${orderBy.key} does not exist for the ${this.name} schema`);
}
util.inherits(TableStatement, Statement);
alter.orderBy = orderBy;
}
TableStatement.prototype._init = function (options) {
var alter = options.alter || options.with || {};
var opts = {};
var orderBy;
var w;
if (options.lookupKey) {
opts.lookupKey = options.lookupKey;
opts.lookupColumn = this.schema.meta[opts.lookupKey];
if (['map', 'set'].indexOf(opts.lookupColumn.type) !== -1) {
throw new Error(
'Creating lookup table with type: '
+ opts.lookupColumn.type);
}
}
//
// We want to converge on everything being under alter/with option
//
if (options.orderBy || alter.orderBy) {
opts.useIndex = !!options.useIndex;
//
// Currently this is expected to be an object with properties
// { key: 'createdAt', order: 'ascending' }
// If we are altering the table with a `with`
//
orderBy = clone(options.orderBy || alter.orderBy);
if (alter && Object.keys(alter).length) {
const w = new With(alter);
if (w.error) return w.error;
opts.with = w.cql;
}
//
// Map it to the correct string if passed in correctly if exists (its ok for
// this to be undefined).
// `ensure` or `drop` currently
//
orderBy.order = this.schema.orderMap[orderBy.order && orderBy.order.toLowerCase()];
//
// Test if the key exists and returns the transformed key to use if it does,
// otherwise returns undefined.
//
orderBy.key = this.schema.exists(orderBy.key);
if (!this.schema.exists(orderBy.key)) {
return new Error(orderBy.key + ' does not exist for the ' + this.name + ' schema');
}
opts.type = options.type;
alter.orderBy = orderBy;
}
if (options.lookupKey) {
opts.lookupKey = options.lookupKey;
opts.lookupColumn = this.schema.meta[opts.lookupKey];
if (['map', 'set'].indexOf(opts.lookupColumn.type) !== -1) {
return new Error(
'Creating lookup table with type: '
+ opts.lookupColumn.type);
const env = process.env.NODE_ENV;
if (['prod', 'production'].indexOf(env) !== -1
&& opts.type === 'drop'
&& !options.force) {
throw new Error('Please don\'t try and drop your prod tables without being certain');
}
}
opts.useIndex = !!options.useIndex;
//
// If we are altering the table with a `with`
//
if (alter && Object.keys(alter).length) {
w = new With(alter);
if (w.error) return w.error;
opts.with = w.cql;
return opts;
}
//
// `ensure` or `drop` currently
//
opts.type = options.type;
build(options) {
const schema = options.schema || this.schema;
const type = options.type;
var env = process.env.NODE_ENV;
if (['prod', 'production'].indexOf(env) !== -1
&& opts.type === 'drop'
&& !options.force) {
return new Error('Please don\'t try and drop your prod tables without being certain');
}
this.options = {
executeAsPrepared: true,
queryName: `${type}-table-${schema.name}`
};
return opts;
};
const tableName = this._computeTable(options);
TableStatement.prototype.build = function (options) {
var schema = options.schema || this.schema;
var type = options.type;
var tableName;
this.params = [];
this.options = {
executeAsPrepared: true,
queryName: type + '-table-' + schema.name
};
this.cql = this._compile(options, tableName, options.lookupKey || schema.primaryKeys());
tableName = this._computeTable(options);
return this;
}
this.params = [];
_computeTable(options) {
const schema = options.schema || this.schema;
const type = options.type;
this.cql = this._compile(options, tableName, options.lookupKey || schema.primaryKeys());
this.options.queryName = [type, 'index', schema.name, options.lookupKey].join('-');
return this;
};
if (!options.lookupKey) return this.table;
TableStatement.prototype._computeTable = function (options) {
var schema = options.schema || this.schema;
var type = options.type;
var table;
const table = this.schema.lookupTables[options.lookupKey];
this.options.queryName = [type, 'index', schema.name, options.lookupKey].join('-');
if (table) return table;
//
// Compute the lookupTable name based on the key and if its used as an index
// or not
//
return options.useIndex
? `${schema.name}_${options.lookupKey}`
: `${schema.name}_by_${options.lookupKey.replace(/_\w+$/, '')}`;
}
if (!options.lookupKey) return this.table;
table = this.schema.lookupTables[options.lookupKey];
if (table) return table;
//
// Compute the lookupTable name based on the key and if its used as an index
// or not
// Figure out what statement will be executed
//
return options.useIndex
? schema.name + '_' + options.lookupKey
: schema.name + '_by_' + options.lookupKey.replace(/_\w+$/, '');
};
_compile(options) {
const fn = this[`_${options.type}`];
if (fn) return fn.apply(this, arguments);
//
// Figure out what statement will be executed
//
TableStatement.prototype._compile = function (options) {
var fn = this['_' + options.type];
if (fn) return fn.apply(this, arguments);
// This shouldn't happen
throw new Error(`Invalid type ${options.type}`);
}
// This shouldn't happen
throw new Error('Invalid type ' + options.type);
};
/*
* Drop the table or index
* @options {Object} options passed in
* @tableName {String} Name of table or index
* @returns {String} cql value for statement to be executed
*/
_drop(options, tableName) {
return [
'DROP',
(options.useIndex ? 'INDEX' : 'TABLE'),
tableName
].join(' ');
}
/*
* Drop the table or index
* @options {Object} options passed in
* @tableName {String} Name of table or index
* @returns {String} cql value for statement to be executed
*/
TableStatement.prototype._drop = function (options, tableName) {
return [
'DROP',
(options.useIndex ? 'INDEX' : 'TABLE'),
tableName
].join(' ');
};
_ensure(options, tableName, primaryKeys) {
const schema = options.schema || this.schema;
const secondaryKeys = schema.secondaryKeys();
TableStatement.prototype._ensure = function (options, tableName, primaryKeys) {
var schema = options.schema || this.schema;
var secondaryKeys = schema.secondaryKeys();
tableName = tableName || this.table;
primaryKeys = primaryKeys || schema.primaryKeys();
tableName = tableName || this.table;
primaryKeys = primaryKeys || schema.primaryKeys();
let cql = '';
var cql = '';
if (options.useIndex) {
cql += `CREATE INDEX IF NOT EXISTS ${tableName} on ${schema.name}(${primaryKeys})`;
return cql;
}
if (options.useIndex) {
cql += 'CREATE INDEX IF NOT EXISTS ' + tableName
+ ' on ' + schema.name + '(' + primaryKeys + ')';
return cql;
}
cql += `CREATE TABLE IF NOT EXISTS ${tableName} (\n`;
cql += 'CREATE TABLE IF NOT EXISTS ' + tableName + ' (\n';
Object.keys(schema.meta).forEach(function (key) {
const column = schema.meta[key];
cql += ' ';
//
// Handle all the higher level types
//
//
if (['map', 'set', 'list'].indexOf(column.type) !== -1) {
const innerTypes = [].concat(column[`${column.type}Type`]).join(',');
cql += `${key} ${column.type}<${innerTypes}>,\n`;
return;
}
cql += `${key} ${column.type},\n`;
}, this);
Object.keys(schema.meta).forEach(function (key) {
var column = schema.meta[key];
cql += ' ';
//
// Handle all the higher level types
// Handle both compoundKeys as well as
//
const primaryKeyExpr = (
schema.compositePrimary
? `(${primaryKeys.join(', ')})`
: primaryKeys
);
cql += ` PRIMARY KEY (${primaryKeyExpr}`;
//
if (['map', 'set', 'list'].indexOf(column.type) !== -1) {
cql += key + ' ' + column.type + '<'
+ [].concat(column[column.type + 'Type']).join(',') + '>,\n';
return;
// Properly support secondary keys / clustering keys
//
cql += secondaryKeys && secondaryKeys.length
? `, ${secondaryKeys.join(', ')}`
: '';
//
// Close keys paren
//
cql += ')\n';
//
// Close table statement paren
//
cql += ')';
// If we have a with statement to append, lets do that here
if (options.with) {
cql += ` ${options.with}`;
}
cql += key + ' ' + column.type + ',\n';
}, this);
//
// Handle both compoundKeys as well as
//
cql += ' PRIMARY KEY (' + (
schema.compositePrimary
? '(' + primaryKeys.join(', ') + ')'
: primaryKeys
);
//
// Properly support secondary keys / clustering keys
//
cql += secondaryKeys && secondaryKeys.length
? ', ' + secondaryKeys.join(', ')
: '';
//
// Close keys paren
//
cql += ')\n';
//
// Close table statement paren
//
cql += ')';
// If we have a with statement to append, lets do that here
if (options.with) {
cql += ' ' + options.with;
cql += ';';
return cql;
}
}
cql += ';';
return cql;
};
module.exports = TableStatement;

@@ -0,610 +1,572 @@

const clone = require('clone');
const assign = require('object-assign');
const CompoundStatement = require('../compound-statement');
const CreateStatement = require('./create');
const RemoveStatement = require('./remove');
const Statement = require('../statement');
const mapKeyBadRegex = /--/;
const positiveIntRegex = /^\d+$/;
var util = require('util');
var clone = require('clone');
var assign = require('object-assign');
var CompoundStatement = require('../compound-statement');
var CreateStatement = require('./create');
var RemoveStatement = require('./remove');
var Statement = require('../statement');
var mapKeyBadRegex = /--/;
var positiveIntRegex = /^\d+$/;
var UpdateStatement = module.exports = function () {
CompoundStatement.apply(this, arguments);
this.buffer = [];
//
// Track the index of the statement we need to append to based on the type of
// operations. Lets map it by type as that should reduce the overall number of
// statements
//
this.index = {};
this.index.set = 0;
this.index.list = 0;
this.index.map = 0;
this.index.delete = 0;
};
util.inherits(UpdateStatement, CompoundStatement);
//
// For lookup tables this is going to be expensive as we need to do a find
// before this even happens unless we pass in the previous entity
// Special statement used for update
//
UpdateStatement.prototype._init = function (options, entity) {
var previous,
opts = {},
changed,
prev,
key;
function PartialStatement() {
this.cql = [];
this.params = [];
this.options = {};
}
entity = this.schema.fixKeys(entity);
class UpdateStatement extends CompoundStatement {
constructor(...args) {
super(...args);
if (options.previous) {
previous = this.schema.fixKeys(options.previous);
}
//
// Assess whether or not one of our primaryKeys has changed (pertaining to
// lookup tables). Previous is required to be a fully formed object that
// contains ALL properties, otherwise we will have a very bad time with this
// create/delete that needs to happen
//
if (this.schema.lookups && options.table) {
key = this.schema._reverseLookupKeyMap[options.table];
this.buffer = [];
//
// If the key exists in the entity that is being updated and it is not
// equal to that of the previous entity, a primary key has changed
// Track the index of the statement we need to append to based on the type of
// operations. Lets map it by type as that should reduce the overall number of
// statements
//
changed = entity[key] && previous && entity[key] !== previous[key];
this.index = {};
this.index.set = 0;
this.index.list = 0;
this.index.map = 0;
this.index.delete = 0;
}
//
// Pass the changed status to the build function so we know we are going to do
// a create/delete statement here
// For lookup tables this is going to be expensive as we need to do a find
// before this even happens unless we pass in the previous entity
//
opts.changed = changed;
_init(options, entity) {
let
previous,
changed,
key;
const opts = {};
//
// If we have changed, validate that the previous value would be valid for
// a create operation. This will guarantee that we have a proper previous
// value passed in and not just a stub which would cause terrible things to
// happen to the lookup tables as it would be wrong and out of sync
//
if (changed) {
prev = this.schema.validate(this.schema.deNull(previous), 'create');
if (this.typeOf(prev) === 'error') {
return prev;
entity = this.schema.fixKeys(entity);
if (options.previous) {
previous = this.schema.fixKeys(options.previous);
}
previous = prev;
}
//
// We need to generate specific conditions
// Remark: For updating lookup tables, we need to use the previous entity in
// order to create the conditions so the where clause in finding it is correct
//
opts.conditionals = this.schema.createUpdateConditions(previous || entity, options.table);
//
// Handle the error case when we do not have the appropriate conditions
//
if (this.typeOf(opts.conditionals) === 'error') {
return opts.conditionals;
}
//
// We need a raw transformed entity
// TODO: Proper validation on update with the different data structures that
// can be passed for operations on higher level types, <map> <set> <list>
// In cases where we have detected a primaryKey change, we create an entity
// that will be used in a `create` statement instead
//
//
// Assess whether or not one of our primaryKeys has changed (pertaining to
// lookup tables). Previous is required to be a fully formed object that
// contains ALL properties, otherwise we will have a very bad time with this
// create/delete that needs to happen
//
if (this.schema.lookups && options.table) {
key = this.schema._reverseLookupKeyMap[options.table];
//
// If the key exists in the entity that is being updated and it is not
// equal to that of the previous entity, a primary key has changed
//
changed = entity[key] && previous && entity[key] !== previous[key];
}
opts.entity = this.schema.deNull(
changed ? this._entityToReplace(previous, entity) : entity
);
//
// Pass the changed status to the build function so we know we are going to do
// a create/delete statement here
//
opts.changed = changed;
if (this.typeOf(opts.entity) === 'error') {
return opts.entity;
}
//
// If we have changed, validate that the previous value would be valid for
// a create operation. This will guarantee that we have a proper previous
// value passed in and not just a stub which would cause terrible things to
// happen to the lookup tables as it would be wrong and out of sync
//
if (changed) {
previous = this.schema.validate(this.schema.deNull(previous), 'create');
}
//
// Validate and transform the entity
//
opts.entity = this.schema.validate(opts.entity, 'update');
//
// We need to generate specific conditions
// Remark: For updating lookup tables, we need to use the previous entity in
// order to create the conditions so the where clause in finding it is correct
//
opts.conditionals = this.schema.createUpdateConditions(previous || entity, options.table);
if (this.typeOf(opts.entity) === 'error') {
return opts.entity;
}
//
// We need a raw transformed entity
// TODO: Proper validation on update with the different data structures that
// can be passed for operations on higher level types, <map> <set> <list>
// In cases where we have detected a primaryKey change, we create an entity
// that will be used in a `create` statement instead
//
//
// Pass down the table if we are dealing with lookup tables
//
opts.table = options.table;
opts.entity = this.schema.deNull(
changed ? this._entityToReplace(previous, entity) : entity
);
//
// Allow ttl to be passed into an update
//
if (options.ttl) opts.ttl = options.ttl;
//
// Validate and transform the entity
//
opts.entity = this.schema.validate(opts.entity, 'update');
return opts;
};
//
// Pass down the table if we are dealing with lookup tables
//
opts.table = options.table;
//
// Do a merge of the previous and entity while taking into consideration the
// special data-structures we deal with the special commands for sets/lists etc.
//
UpdateStatement.prototype._entityToReplace = function (previous, entity) {
var self = this;
//
// Create a normalized entity using the previous as a reference so that we can
// just do a proper shallow merge with the previous to get the entity we want
//
var normalizedEntity = Object.keys(entity).reduce(function (acc, field) {
var meta = self.schema.fieldMeta(field);
var value;
//
// Allow ttl to be passed into an update
//
if (options.ttl) opts.ttl = options.ttl;
switch (meta.type) {
//
// For a map we need to just merge the values with previous
//
case 'map':
value = assign(previous[field] || {}, entity[field]);
break;
//
// Detect if we have a special version data structure, and take the
// appropriate actions based on the `add` or `remove`
//
case 'set':
value = self._handleSetOrList(entity[field], value);
break;
case 'list':
value = self._handleList(entity[field], value);
break;
default:
value = entity[field];
break;
}
acc[field] = value;
return acc;
}, {});
//
// Return the shallow merged version of the entity to put into cassandra
//
return assign(previous, normalizedEntity);
};
//
// Remark: We do not do any deletes in update because we are very wary of creating
// tombstones. We use a specific strategy of setting `null-like` characters for
// each type in order to prevent this from happening
//
UpdateStatement.prototype.build = function (options) {
var conditionals = options.conditionals;
var entity = options.entity;
//
// Remark: If we have a lookup table passed down we use it when considering the
// filtering
//
var isLookup = !!options.table;
var table = options.table || this.table;
//
// This is a special case where the primaryKey has changed which means an
// update statement will not suffice, we need a delete and a create statement.
// We ensure that we only do these create and remove statements in the case where
//
if (options.changed) {
return this.replaceLookupRecord(options);
return opts;
}
this.options = { executeAsPrepared: true, queryName: 'update-' + table };
//
// Create a criteria object that is used to add the where clause info needed
// for each statement that gets created
// Do a merge of the previous and entity while taking into consideration the
// special data-structures we deal with the special commands for sets/lists etc.
//
this.criteria = {};
this.criteria.cql = conditionals.query.join(' AND ');
this.criteria.params = conditionals.params;
Object.keys(entity)
_entityToReplace(previous, entity) {
//
// Remark: This filtering should be disabled when dealing with updating
// lookup tables with primary keys.
// Create a normalized entity using the previous as a reference so that we can
// just do a proper shallow merge with the previous to get the entity we want
//
// Why does this filter exist? Assumption: We assume that the primary key
// never changes but this does not hold true when we are dealing with lookup
// tables in certain cases so we need to handle this here
//
.filter(function (field) {
//
// Filter out the primary/secondary keys
// UNLESS we are dealing with lookup tables since we need to update those
// keys properly
//
// When we are a lookup table we only filter out the primary key
// associated with that table, otherwise cassandra yells at us
return isLookup
? this.schema._reverseLookupKeyMap[table] !== field
: !this.schema.isKey(field);
}, this)
.forEach(function (field) {
//
// Grab the value of the update
//
var value = entity[field];
//
// Get the column metadata for the given field and create the right kind
// of statement
//
var meta = this.schema.fieldMeta(field);
const normalizedEntity = Object.keys(entity).reduce((acc, field) => {
const meta = this.schema.fieldMeta(field);
let value;
switch (meta.type) {
//
// For a map we need to just merge the values with previous
//
case 'map':
this.mapUpdate(field, value, meta);
value = assign(previous[field] || {}, entity[field]);
break;
//
// Detect if we have a special version data structure, and take the
// appropriate actions based on the `add` or `remove`
//
case 'set':
this.setUpdate(field, value, meta);
value = this._handleSetOrList(entity[field], value);
break;
case 'list':
this.listUpdate(field, value, meta);
value = this._handleList(entity[field], value);
break;
default:
this.columnUpdate(field, value, meta);
value = entity[field];
break;
}
}, this);
acc[field] = value;
return acc;
}, {});
//
// Return the shallow merged version of the entity to put into cassandra
//
return assign(previous, normalizedEntity);
}
//
// Iterate through the statements we created, assess them and build the actual
// statements array if they are valid
// NOTE: Partial Statements end up being a special kind of statement that has
// an array as its `cql` property rather than a string to make it more easily
// extensible. (many many things can be updated in a single statement). The
// finalizeStatement takes care of normalizing these partials into a proper
// full statement
// Remark: We do not do any deletes in update because we are very wary of creating
// tombstones. We use a specific strategy of setting `null-like` characters for
// each type in order to prevent this from happening
//
for (var i = 0; i < this.buffer.length; i++) {
var partialStatement = this.buffer[i];
if (partialStatement.cql.length) {
build(options) {
const conditionals = options.conditionals;
const entity = options.entity;
//
// Remark: If we have a lookup table passed down we use it when considering the
// filtering
//
const isLookup = !!options.table;
const table = options.table || this.table;
//
// This is a special case where the primaryKey has changed which means an
// update statement will not suffice, we need a delete and a create statement.
// We ensure that we only do these create and remove statements in the case where
//
if (options.changed) {
return this.replaceLookupRecord(options);
}
this.options = { executeAsPrepared: true, queryName: `update-${table}` };
//
// Create a criteria object that is used to add the where clause info needed
// for each statement that gets created
//
this.criteria = {};
this.criteria.cql = conditionals.query.join(' AND ');
this.criteria.params = conditionals.params;
Object.keys(entity)
//
// Build the REAL statement and push it to the array
// Remark: This filtering should be disabled when dealing with updating
// lookup tables with primary keys.
//
this.statements.push(this.finalizeStatement(options, table, partialStatement));
}
}
this.buffer.length = 0;
// Why does this filter exist? Assumption: We assume that the primary key
// never changes but this does not hold true when we are dealing with lookup
// tables in certain cases so we need to handle this here
//
.filter(function (field) {
//
// Filter out the primary/secondary keys
// UNLESS we are dealing with lookup tables since we need to update those
// keys properly
//
// When we are a lookup table we only filter out the primary key
// associated with that table, otherwise cassandra yells at us
return isLookup
? this.schema._reverseLookupKeyMap[table] !== field
: !this.schema.isKey(field);
}, this)
.forEach(function (field) {
//
// Grab the value of the update
//
const value = entity[field];
//
// Get the column metadata for the given field and create the right kind
// of statement
//
const meta = this.schema.fieldMeta(field);
return this;
};
switch (meta.type) {
case 'map':
this.mapUpdate(field, value, meta);
break;
case 'set':
this.setUpdate(field, value, meta);
break;
case 'list':
this.listUpdate(field, value, meta);
break;
default:
this.columnUpdate(field, value, meta);
break;
}
}, this);
//
// This is a special function that creates a create and a remove statement based
// on the options given to be executed in cases where
//
UpdateStatement.prototype.replaceLookupRecord = function (options) {
this.statements.push(new RemoveStatement(this.schema).build(options));
this.statements.push(new CreateStatement(this.schema).build(options));
return this;
};
//
// Iterate through the statements we created, assess them and build the actual
// statements array if they are valid
// NOTE: Partial Statements end up being a special kind of statement that has
// an array as its `cql` property rather than a string to make it more easily
// extensible. (many many things can be updated in a single statement). The
// finalizeStatement takes care of normalizing these partials into a proper
// full statement
//
for (let i = 0; i < this.buffer.length; i++) {
const partialStatement = this.buffer[i];
if (partialStatement.cql.length) {
//
// Build the REAL statement and push it to the array
//
this.statements.push(this.finalizeStatement(options, table, partialStatement));
}
}
this.buffer.length = 0;
//
// Handle map updates which receives an object with { key: value }
// as the `value` param
//
UpdateStatement.prototype.mapUpdate = function (field, value, meta) {
return this;
}
//
// If the value itself is null, set it to an empty object and set the map
// equal to that
// This is a special function that creates a create and a remove statement based
// on the options given to be executed in cases where
//
if (value === null) {
value = {};
replaceLookupRecord(options) {
this.statements.push(new RemoveStatement(this.schema).build(options));
this.statements.push(new CreateStatement(this.schema).build(options));
return this;
}
if (Array.isArray(value) || !(value && typeof value === 'object')) {
throw new Error('Tried to insert value "'
+ value + '" into map "'
+ field + '" in table "'
+ this.table + '". Value should be an object.');
}
//
// Remark: In theory this validation should happen earlier and this should simply
// generate the statement (which is very simple)
// Handle map updates which receives an object with { key: value }
// as the `value` param
//
Object.keys(value).forEach(function (mapKey) {
var mapValue = value[mapKey];
mapUpdate(field, value, meta) {
//
// TODO: This validation should be handled by joi schema at a higher level
// If the value itself is null, set it to an empty object and set the map
// equal to that
//
if (mapKeyBadRegex.test(mapKey)) {
throw new Error('Tried to insert invalid map key "'
+ mapKey + '" into map "'
+ field + '" in table "'
+ this.table + '".');
if (value === null) {
value = {};
}
if (Array.isArray(value) || !(value && typeof value === 'object')) {
throw new Error(`Tried to insert value "${value}" into map "${field}" in table "${this.table}". Value should be an object.`);
}
//
// Strip any undefined values, TODO: This should be done before hand
// Remark: In theory this validation should happen earlier and this should simply
// generate the statement (which is very simple)
//
if (typeof mapValue === 'undefined') {
delete value[mapKey];
}
Object.keys(value).forEach(mapKey => {
const mapValue = value[mapKey];
//
// TODO: This validation should be handled by joi schema at a higher level
//
if (mapKeyBadRegex.test(mapKey)) {
throw new Error(`Tried to insert invalid map key "${mapKey}" into map "${field}" in table "${this.table}".`);
}
//
// Strip any undefined values, TODO: This should be done before hand
//
if (typeof mapValue === 'undefined') {
delete value[mapKey];
}
//
// Remark: Since null signifies a delete, we explicitly set the property to null.
// Since we don't really want to delete (it creates tombstones) we should
// set this to a value based on the map's valueType before we even get here
// TODO: See if this can be done in the same statement as the full
// collection statement. I feel like this value would need to be stripped in
// any case or would the null be handled correctly if we do it as a standard
// collection statement?
// if (mapValue === null) {
// statement.cql.push(field + '[\'' + mapKey + '\']' + ' = ?');
// statement.params.push(this.schema.valueOf(field, mapValue));
// }
//
// Remark: Since null signifies a delete, we explicitly set the property to null.
// Since we don't really want to delete (it creates tombstones) we should
// set this to a value based on the map's valueType before we even get here
// TODO: See if this can be done in the same statement as the full
// collection statement. I feel like this value would need to be stripped in
// any case or would the null be handled correctly if we do it as a standard
// collection statement?
// if (mapValue === null) {
// statement.cql.push(`${field}['${mapKey}'] = ?`);
// statement.params.push(this.schema.valueOf(field, mapValue));
// }
});
this.generateCollectionStatement({
field: field,
value: value,
type: meta.type,
operator: '+',
suffix: true
});
}
}, this);
this.generateCollectionStatement({
field: field,
value: value,
type: meta.type,
operator: '+',
suffix: true
});
};
//
// Remark: Currently we do not allow objects to be passed in for a set due to schema
// validation, we should consider this for update
//
// value: [] or { add: [], remove: [] }
//
UpdateStatement.prototype.setUpdate = function (field, value, meta) {
var type = meta.type;
//
// Assess the typeof the value that is being passed in for the set-type
// Remark: Currently we do not allow objects to be passed in for a set due to schema
// validation, we should consider this for update
//
switch (this.typeOf(value)) {
// value: [] or { add: [], remove: [] }
//
setUpdate(field, value, meta) {
const type = meta.type;
//
// Directly set the value when its an array
// Assess the typeof the value that is being passed in for the set-type
//
case 'array':
switch (this.typeOf(value)) {
//
// We just set the array like a regular column
// Directly set the value when its an array
//
this.columnUpdate(field, value, meta);
break;
case 'object':
['add', 'remove'].forEach(function (key) {
if (!value[key] || !value[key].length) return;
this.generateCollectionStatement({
field: field,
value: value[key],
type: type,
operator: key === 'remove' ? '-' : '+',
suffix: true
});
}, this);
break;
default:
//
// Validation should catch this so this case shouldnt be hit
//
throw new Error('Invalid value ' + value + ' for set update on ' + field);
}
};
//
// List: [] or { prepend: [], append: [], remove: [], index: { idx: value }
//
UpdateStatement.prototype.listUpdate = function (field, value, meta) {
var type = meta.type;
switch (this.typeOf(value)) {
case 'array':
this.columnUpdate(field, value, meta);
break;
case 'object':
['prepend', 'append', 'remove']
.forEach(function (key) {
//
// If we don't contain the appropriate keys, do nothing
//
case 'array':
//
// We just set the array like a regular column
//
this.columnUpdate(field, value, meta);
break;
case 'object':
['add', 'remove'].forEach(function (key) {
if (!value[key] || !value[key].length) return;
this.generateCollectionStatement({
field: field,
value: !Array.isArray(value[key]) ? [value[key]] : value[key],
value: value[key],
type: type,
operator: key === 'remove' ? '-' : '+',
suffix: key !== 'prepend'
suffix: true
});
}, this);
break;
default:
//
// Validation should catch this so this case shouldnt be hit
//
throw new Error(`Invalid value ${value} for set update on ${field}`);
}
}
//
// List: [] or { prepend: [], append: [], remove: [], index: { idx: value }
//
listUpdate(field, value, meta) {
const type = meta.type;
switch (this.typeOf(value)) {
case 'array':
this.columnUpdate(field, value, meta);
break;
case 'object':
['prepend', 'append', 'remove']
.forEach(function (key) {
//
// If we don't contain the appropriate keys, do nothing
//
if (!value[key] || !value[key].length) return;
this.generateCollectionStatement({
field: field,
value: !Array.isArray(value[key]) ? [value[key]] : value[key],
type: type,
operator: key === 'remove' ? '-' : '+',
suffix: key !== 'prepend'
});
}, this);
//
// Index operations are a little bit more complex
//
if (value.index
&& this.typeOf(value.index) === 'object'
&& Object.keys(value.index).length) {
this.generateListIndexStatement(field, value.index, meta);
}
break;
default:
throw new Error(`Invalid value ${value} for list update on ${field}`);
}
}
generateListIndexStatement(field, map, meta) {
const statement = this.statement();
Object.keys(map).forEach(idx => {
const value = map[idx];
//
// Index operations are a little bit more complex
// Don't allow negative indicies
// Remark/TODO: This could live in actual validation for these more complex types
//
if (value.index
&& this.typeOf(value.index) === 'object'
&& Object.keys(value.index).length) {
this.generateListIndexStatement(field, value.index, meta);
if (!positiveIntRegex.test(String(idx))) {
throw new Error(`Tried to insert an invalid index "${idx}" into list "${field}"`);
}
break;
default:
throw new Error('Invalid value ' + value + ' for list update on ' + field);
statement.cql.push(`${field}[${idx}] = ?`);
//
// Remark: Override the hint associated with the `field` because it comes out wrong
// for this list operation. We pass the listType in this case to override
// the "hint" because its not an array here
//
statement.params.push(this.schema.valueOf(field, value, meta.listType));
});
}
};
UpdateStatement.prototype.generateListIndexStatement = function (field, map, meta) {
var statement = this.statement();
Object.keys(map).forEach(function (idx) {
var value = map[idx];
//
// Update a standard cassandra column
//
columnUpdate(field, value) {
const statement = this.statement();
statement.cql.push(`${field} = ?`);
//
// Don't allow negative indicies
// Remark/TODO: This could live in actual validation for these more complex types
// This should be the correct value returned
//
if (!positiveIntRegex.test(idx + '')) {
throw new Error('Tried to insert an invalid index "' + idx + '" into list "' + field + '"');
}
statement.cql.push(field + '[' + idx + '] = ?');
statement.params.push(this.schema.valueOf(field, value));
}
//
// Generate the more complicated collection operation statement's used here.
//
generateCollectionStatement(opts) {
const statement = this.statement(this.index[opts.type]);
//
// Remark: Override the hint associated with the `field` because it comes out wrong
// for this list operation. We pass the listType in this case to override
// the "hint" because its not an array here
// Generate the appropriate statement based on if its a suffix or not for any
// generic collection
//
statement.params.push(this.schema.valueOf(field, value, meta.listType));
}, this);
};
const valueExpr = opts.suffix
? `${opts.field} ${opts.operator} ?`
: `? ${opts.operator} ${opts.field}`;
statement.cql.push(`${opts.field} = ${valueExpr}`);
statement.params.push(this.schema.valueOf(opts.field, opts.value));
//
// Remark: Only set and list operations need to ensure subsequent commands exist on
// a new statement
//
if (['list', 'set'].indexOf(opts.type) !== -1) this.index[opts.type]++;
}
//
// Update a standard cassandra column
//
UpdateStatement.prototype.columnUpdate = function (field, value) {
var statement = this.statement();
statement.cql.push(field + ' = ?');
//
// This should be the correct value returned
// Return the current statement to modify
// Question: Will index counters of different types end up returning the same statement?
//
statement.params.push(this.schema.valueOf(field, value));
};
statement(idx) {
idx = idx || 0;
while (idx >= this.buffer.length) {
this.buffer.push(new PartialStatement());
}
//
// Generate the more complicated collection operation statement's used here.
//
UpdateStatement.prototype.generateCollectionStatement = function (opts) {
var statement = this.statement(this.index[opts.type]);
return this.buffer[idx];
}
//
// Generate the appropriate statement based on if its a suffix or not for any
// generic collection
// Finalize a statement given a partial statement
//
statement.cql.push(
opts.field + ' = ' +
(opts.suffix
? (opts.field + ' ' + opts.operator + ' ?')
: ('? ' + opts.operator + ' ' + opts.field)
)
);
statement.params.push(this.schema.valueOf(opts.field, opts.value));
finalizeStatement(options, table, partial) {
const statement = new Statement(this.schema);
const ttl = options.ttl ? ` USING TTL ${options.ttl}` : '';
if (!partial.delete) {
statement.cql += `UPDATE ${table}${ttl} SET ${partial.cql.join(', ')} WHERE ${this.criteria.cql}`;
} else {
statement.cql += `DELETE ${partial.cql.join(', ')} FROM ${this.table} WHERE ${this.criteria.cql}`;
}
statement.params = partial.params.concat(clone(this.criteria.params));
statement.options = this.options;
return statement;
}
//
// Remark: Only set and list operations need to ensure subsequent commands exist on
// a new statement
// Return a proper value when testing for list specific properties that does not
// have shared semantics with set
//
if (['list', 'set'].indexOf(opts.type) !== -1) this.index[opts.type]++;
};
_handleList(list, prev) {
let value = prev || [];
//
// Run the functions that set and list share first
//
value = this._handleSetOrList(list, prev);
//
// Return the current statement to modify
// Question: Will index counters of different types end up returning the same statement?
//
UpdateStatement.prototype.statement = function (idx) {
idx = idx || 0;
while (idx >= this.buffer.length) {
this.buffer.push(new PartialStatement());
}
if (list.prepend && Array.isArray(list.prepend)) {
value.shift.apply(value, list.prepend);
}
return this.buffer[idx];
};
if (list.index && this.typeOf(list.index) === 'object') {
Object.keys(list.index).forEach(function (idx) {
//
// Don't allow any dangerous operations, and maybe error here
//
if (idx >= value.length) return;
//
// Set the value to the index value of the array
//
value[+idx] = list.index[idx];
});
}
//
// Finalize a statement given a partial statement
//
UpdateStatement.prototype.finalizeStatement = function (options, table, partial) {
var statement = new Statement(this.schema);
var ttl = options.ttl ? util.format(' USING TTL %d', options.ttl) : '';
if (!partial.delete) {
statement.cql += 'UPDATE ' + table + ttl + ' SET ' +
partial.cql.join(', ') + ' WHERE ' + this.criteria.cql;
} else {
statement.cql += 'DELETE ' + partial.cql.join(', ') + ' FROM ' +
this.table + ' WHERE ' + this.criteria.cql;
return value;
}
statement.params = partial.params.concat(clone(this.criteria.params));
statement.options = this.options;
return statement;
};
//
// Special statement used for update
//
function PartialStatement() {
this.cql = [];
this.params = [];
this.options = {};
}
//
// Return a proper value when testing for list specific properties that does not
// have shared semantics with set
//
UpdateStatement.prototype._handleList = function handleList(list, prev) {
var value = prev || [];
//
// Run the functions that set and list share first
// Return the proper value given a set or a list and the previous value
//
value = this._handleSetOrList(list, prev);
_handleSetOrList(sol, prev) {
const value = prev || [];
const add = sol.add || sol.append;
if (list.prepend && Array.isArray(list.prepend)) {
value.shift.apply(value, list.prepend);
}
if (Array.isArray(sol)) {
return sol;
}
//
// Handle the add or append case for sets or lists by pushing to the back of
// the array
//
if (add && Array.isArray(add)) {
value.push.apply(value, add);
}
if (list.index && this.typeOf(list.index) === 'object') {
Object.keys(list.index).forEach(function (idx) {
if (sol.remove && Array.isArray(sol.remove)) {
//
// Don't allow any dangerous operations, and maybe error here
// Iterate through the remove array and splice out the index if it
// exists in the previous array. This simulates the Cassandra
// operation
//
if (idx >= value.length) return;
//
// Set the value to the index value of the array
//
value[+idx] = list.index[idx];
});
}
sol.remove.forEach(function (key) {
const idx = value.indexOf(key);
if (idx === -1) return;
value.splice(idx, 1);
});
}
return value;
};
//
// Return the proper value given a set or a list and the previous value
//
UpdateStatement.prototype._handleSetOrList = function handleSetOrList(sol, prev) {
var value = prev || [];
var add = sol.add || sol.append;
if (Array.isArray(sol)) {
return sol;
return value;
}
//
// Handle the add or append case for sets or lists by pushing to the back of
// the array
//
if (add && Array.isArray(add)) {
value.push.apply(value, add);
}
}
if (sol.remove && Array.isArray(sol.remove)) {
//
// Iterate through the remove array and splice out the index if it
// exists in the previous array. This simulates the Cassandra
// operation
//
sol.remove.forEach(function (key) {
var idx = value.indexOf(key);
if (idx === -1) return;
value.splice(idx, 1);
});
}
return value;
};
module.exports = UpdateStatement;

@@ -0,113 +1,109 @@

const async = require('async');
class StatementCollection {
/*
* Constructor function for the StatementCollection can decide
* on using a C* batching or non-batching strategy.
*
* @param {Driver} Connection which these statements are associated.
*/
constructor(connection, strategy) {
this.statements = [];
this.connection = connection;
this.strategy = strategy || 'batch';
//
// Default our consistency to local-quorum as it seems reasonable
//
this._consistency = 'localQuorum';
var async = require('async');
/*
* Constructor function for the StatementCollection can decide
* on using a C* batching or non-batching strategy.
*
* @param {Driver} Connection which these statements are associated.
*/
var StatementCollection = module.exports = function StatementCollection(connection, strategy) {
this.statements = [];
this.connection = connection;
this.strategy = strategy || 'batch';
//
// Default our consistency to local-quorum as it seems reasonable
//
this._consistency = 'localQuorum';
//
// If strategy is not "batch" then it is the upper bound of
// concurrency for the number of statements to execute at once.
//
if (this.strategy !== 'batch') {
this.strategy = this.strategy && !isNaN(this.strategy)
? this.strategy
: 5;
//
// If strategy is not "batch" then it is the upper bound of
// concurrency for the number of statements to execute at once.
//
if (this.strategy !== 'batch') {
this.strategy = this.strategy && !isNaN(this.strategy)
? this.strategy
: 5;
}
}
};
/**
* Hava a way to set consistency for a statementCollection that gets submitted
* @param {Object} consistency - Consistency object
* @returns {exports} - The statement collection object to be returned
*/
StatementCollection.prototype.consistency = function (consistency) {
if (!consistency) {
/**
* Hava a way to set consistency for a statementCollection that gets submitted
* @param {Object} consistency - Consistency object
* @returns {exports} - The statement collection object to be returned
*/
consistency(consistency) {
if (!consistency) {
return this;
}
this._consistency = consistency;
return this;
}
this._consistency = consistency;
return this;
};
/*
* Executes the set of statements using the appropriate strategy
* (batched or non-batched). "Non-batches" means the set of statements
* is executed in parallel as independent queries.
*/
StatementCollection.prototype.execute = function (callback) {
if (this.strategy === 'batch') {
return this.executeBatch(callback);
}
/*
* Executes the set of statements using the appropriate strategy
* (batched or non-batched). "Non-batches" means the set of statements
* is executed in parallel as independent queries.
*/
execute(callback) {
if (this.strategy === 'batch') {
return this.executeBatch(callback);
}
var self = this;
async.eachLimit(
this.statements,
this.strategy,
function (statement, next) {
var query = statement.batch
? self.returnBatch(statement.statements)
: statement.extendQuery(
self.connection.beginQuery()
);
async.eachLimit(
this.statements,
this.strategy,
(statement, next) => {
const query = statement.batch
? this.returnBatch(statement.statements)
: statement.extendQuery(this.connection.beginQuery());
query.consistency(self._consistency);
query.consistency(this._consistency);
query.execute(next);
},
callback
);
}
query.execute(next);
/*
* Executes the set of statements as a Cassandra batch representing
* a single transation.
*/
executeBatch(callback) {
const batch = this.returnBatch(this.statements)
.consistency(this._consistency);
//
// TODO: How do we handle these additional "options":
// - `.timestamp()`
// This should be exposed via some option passed to the model in the future
//
},
callback
);
};
batch.execute(callback);
}
/*
* Executes the set of statements as a Cassandra batch representing
* a single transation.
*/
StatementCollection.prototype.executeBatch = function (callback) {
var batch = this.returnBatch(this.statements)
.consistency(this._consistency);
//
// TODO: How do we handle these additional "options":
// - `.timestamp()`
// This should be exposed via some option passed to the model in the future
// Recursively build the batch including any nested batch statements that need
// to be built as well. This allows us to have nested batches within an
// execution of just individual statements as well!
//
returnBatch(statements) {
const batch = this.connection.beginBatch();
statements.forEach(function (statement) {
if (statement.batch) {
return batch.add(this.returnBatch(statement.statements));
}
batch.add(statement.extendQuery(this.connection.beginQuery()));
}, this);
batch.execute(callback);
};
return batch;
}
//
// Recursively build the batch including any nested batch statements that need
// to be built as well. This allows us to have nested batches within an
// execution of just individual statements as well!
//
StatementCollection.prototype.returnBatch = function (statements) {
var batch = this.connection.beginBatch();
statements.forEach(function (statement) {
if (statement.batch) {
return batch.add(this.returnBatch(statement.statements));
}
batch.add(statement.extendQuery(this.connection.beginQuery()));
}, this);
/*
* Add a statement to the collection associated with this instance
*/
add(statement) {
this.statements.push(statement);
}
}
return batch;
};
/*
* Add a statement to the collection associated with this instance
*/
StatementCollection.prototype.add = function (statement) {
this.statements.push(statement);
};
module.exports = StatementCollection;
{
"name": "datastar",
"version": "3.0.2",
"version": "4.0.0-beta.1",
"description": "Now witness the power of this FULLY ARMED AND OPERATIONAL DATASTAR!",

@@ -9,10 +9,6 @@ "main": "lib",

"clone": "^1.0.4",
"joi-of-cql": "^2.0.1",
"list-stream": "^1.0.0",
"joi-of-cql": "^2.0.2",
"lodash.pick": "^4.0.1",
"object-assign": "^4.0.1",
"one-time": "0.0.4",
"priam": "^3.1.0",
"read-only-stream": "^2.0.0",
"through2": "^2.0.0",
"priam": "^4.0.0-beta.2",
"tinythen": "^1.0.1",

@@ -28,7 +24,6 @@ "to-camel-case": "^1.0.0",

"cassandra-driver": "^4.1.0",
"eslint": "^4.19.1",
"eslint-config-godaddy": "^2.1.0",
"eslint-plugin-json": "^1.2.1",
"eslint-plugin-mocha": "^4.12.1",
"eslint-plugin-react": "^3.16.1",
"eslint": "^6.7.1",
"eslint-config-godaddy": "^4.0.0",
"eslint-plugin-json": "^1.4.0",
"eslint-plugin-mocha": "^5.3.0",
"lodash.clonedeep": "^4.5.0",

@@ -59,4 +54,7 @@ "mocha": "^5.2.0",

],
"engines": {
"node": "^10.17.0 || >=12.3.0"
},
"author": "GoDaddy Engineers",
"license": "MIT"
}

@@ -46,15 +46,17 @@ # datastar

``` js
var Datastar = require('datastar');
var datastar = new Datastar({
const Datastar = require('datastar');
const datastar = new Datastar({
config: {
user: 'cassandra',
password: 'cassandra',
credentials: {
username: 'cassandra',
password: 'cassandra'
},
keyspace: 'a_fancy_keyspace',
hosts: ['127.0.0.1', 'host2', 'host3']
contactPoints: ['127.0.0.1', 'host2', 'host3']
}
}).connect();
var cql = datastar.schema.cql;
const cql = datastar.schema.cql;
var Artist = datastar.define('artist', {
const Artist = datastar.define('artist', {
schema: datastar.schema.object({

@@ -97,13 +99,15 @@ artist_id: cql.uuid(),

```js
var Datastar = require('datastar');
var datastar = new Datastar({
const Datastar = require('datastar');
const datastar = new Datastar({
config: {
// who am I connecting as
user: 'cassandra',
// what's my password
password: 'cassandra',
credentials: {
// who am I connecting as
username: 'cassandra',
// what's my password
password: 'cassandra'
},
// what keyspace am I using
keyspace: 'a_fancy_keyspace',
// what cluster hosts do I know about
hosts: ['127.0.0.1', 'host2', 'host3']
contactPoints: ['127.0.0.1', 'host2', 'host3']
}

@@ -118,3 +122,3 @@ });

```js
var datastar = new Datastar(...);
let datastar = new Datastar(...);
// I setup the connection, but I'm not connected yet, let's connect!

@@ -133,3 +137,3 @@ datastar = datastar.connect();

//
var Album = datastar.define('album', {
const Album = datastar.define('album', {
//

@@ -187,3 +191,3 @@ // ensure that the table exists. This executes an implicit CREATE IF NOT EXISTS

```js
var Album = datastar.define('album', {
const Album = datastar.define('album', {
schema: albumSchema,

@@ -283,3 +287,3 @@ readConsistency: 'localQuorum',

```js
var Artist = datastar.define('artist', {
const Artist = datastar.define('artist', {
schema: datastar.schema.object({

@@ -302,5 +306,4 @@ artist_id: cql.uuid(),

});
```
```
In our example above we added `name` as a `lookupKey` to our `Artist` model. This means a few things:

@@ -327,3 +330,2 @@

### Model.create

@@ -334,5 +336,5 @@

``` js
var cql = datastar.schema.cql;
const cql = datastar.schema.cql;
var Beverage = datastar.define('beverage', {
const Beverage = datastar.define('beverage', {
schema: datastar.schema.object({

@@ -370,4 +372,3 @@ 'beverage_id': cql.uuid({ default: 'v4' }),

// and the second with properties2
Model.create({ entities [properties, properties2] })
Model.create({ entities: [properties, properties2] })
```

@@ -444,3 +445,3 @@

```js
var Person = datastar.define('person', {
const Person = datastar.define('person', {
ensureTables: true,

@@ -466,3 +467,3 @@ schema: datastar.schema.object({

//
var person = {
const person = {
name: 'Fred Flinstone',

@@ -673,3 +674,3 @@ attributes: {

```js
var through = require('through2');
const { Transform } = require('stream');

@@ -688,7 +689,11 @@ //

})
.pipe(through.obj(function (bev, enc, callback) {
//
// Massage the beverage object in some way before returning it to the user
//
callback(null, massageBeverage(bev));
.pipe(new Transform({
writableObjectMode: true,
readableObjectMode: true,
transform(bev, enc, callback) {
//
// Massage the beverage object in some way before returning it to the user
//
callback(null, massageBeverage(bev));
}
}))

@@ -699,2 +704,18 @@ .pipe(res);

#### Async Iterable API
The async iterable API, like the stream API, provides another convenient way to process records as they come in. If you do not need the full feature set of node streams, this is a more efficient technique.
To access the async iterable API, set the `iterable` option to `true` in your call to `.find()` or `.findAll()`. Alternately, call the `.iterate()` method of your model, which is equivalent to `.findAll({ ..., iterable: true })`.
```js
async function getAllArtistTracks(artistId) {
let allTracks = [];
for await (const album of Album.iterate({ conditions: { artistId } })) {
allTracks = allTracks.concat(album.trackList);
}
return new Set(allTracks);
}
```
### Model.remove

@@ -771,8 +792,7 @@

Before build is before we create the `statement(s)` that we then collect to
execute and insert into Cassandra. This allows us to modify any of the
entities before CQL is generated for them.
```js
//
// Before build is before we create the `statement(s)` that we then collect to
// execute and insert into Cassandra. This allows us to modify any of the
// entities before CQL is generated for them
//
Beverage.before('create:build', function (options, callback) {

@@ -806,11 +826,12 @@ //

});
```
//
// Before execute is right before we actually send the statements to cassandra!
// This is where we have a chance to modify the statements or
// `StatementCollection` with any other statements we may have or even to just
// the `consistency` we are asking of cassandra if there is only a narrow case
// where you require a consistency of `one`. (You could also just pass
// option.consistency into the function call as well)
//
Before execute is right before we actually send the statements to cassandra.
This is where we have a chance to modify the statements or
`StatementCollection` with any other statements we may have or even to just
the `consistency` we are asking of cassandra if there is only a narrow case
where you require a consistency of `one`. (You could also just pass
option.consistency into the function call as well)
```js
Beverage.before('create:execute', function (options, callback) {

@@ -828,12 +849,13 @@ if (options.commitFast) {

});
```
//
// An `after` hook might for `execute` might look like this if we wanted to
// insert the same data into a separate keyspace using a different `Priam`
// instance. Which would be a separate connection to cassandra. This call is
// ensured to be executed before the `Beverage.create(opts, callback)` function
// calls its callback.
// NOTE: This assumes the same columns exist in this other keyspace
//
var otherDataCenterConnection = new Priam(connectOpts);
An `after` hook for `execute` might look like this if we wanted to
insert the same data into a separate keyspace using a different `Priam`
instance. Which would be a separate connection to cassandra. This call is
ensured to be executed before the `Beverage.create(opts, callback)` function
calls its callback.
NOTE: This assumes the same columns exist in this other keyspace
```js
const otherDataCenterConnection = new Priam(connectOpts);
Beverage.after('create:execute', function (options, callback) {

@@ -848,19 +870,19 @@ //

});
```
//
// The last type of hook we have is for the specific `find` operations
// including. `find:all`, `find:one`, `find:count`, `find:first`. These specifc
// are the same as the above `:build` hooks in when they execute but have
// different and more useful semantics for `after` hooks for modifying data
// fetched. This makes use of [`Understudy's`][understudy] `.waterfall`
// function.
//
The last type of hook we have is for the specific `find` operations,
including `find:all`, `find:one`, `find:count`, and `find:first`. These specific
hooks are the same as the above `:build` hooks in terms of when they execute but have
different and more useful semantics for `after` hooks for modifying data
fetched. This makes use of [`Understudy's`][understudy] `.waterfall`
function. An important caveat is that `find` hooks are _not_ executed when you
use the streaming option. If you want to convert all records from your queries, see [Record Transformation](#record-transformation) for a technique that works for all types of queries.
//
// The after hooks on `find:one` allow us to mutate the result returned from any
// `findOne` query taken on beverage. This could allow us to call an external
// service to fetch extra properties or anything else you can think of. The main
// goal is to provide the extensibility to do what you want without `datastar`
// getting in your way.
//
These after hooks on `find:one` allow us to mutate the result returned from any
`findOne` query taken on beverage. This could allow us to call an external
service to fetch extra properties or anything else you can think of. The main
goal is to provide the extensibility to do what you want without `datastar`
getting in your way.
```js
Beverage.after('find:one', function (result, callback) {

@@ -879,8 +901,8 @@ //

});
```
//
// We can even add another after hook after this one which will get executed in
// series and be able to modify any new attributes!
//
We can even add another after hook after this one which will get executed in
series and be able to modify any new attributes!
```js
Beverage.after('find:one', function (result, callback) {

@@ -898,3 +920,3 @@ //

//
var update = bev.toJSON();
const update = bev.toJSON();
//

@@ -916,5 +938,14 @@ // Do the more efficient update to cassandra on the `set` type.

});
});
```
### Record Transformation
Assign a `transform` method to your model to synchronously modify all records coming back from a query; this technique works for the callback, streaming, or async iterable methods of querying:
```js
Beverage.transform = before => ({
...before,
isDiet: before.sugar <= 0
})
```

@@ -928,3 +959,3 @@

```js
var Spice = datastar.define('spice', {
const Spice = datastar.define('spice', {
ensureTables: true,

@@ -1100,3 +1131,4 @@ schema: /* a valid schema */

- [Bill Enterline](https://github.com/enterline)
- [Jacob Page](https://github.com/DullReferenceException)
[understudy]: https://github.com/bmeck/understudy
{
"cassandra": {
"user": "cassandra",
"password": "cassandra",
"credentials": {
"username": "cassandra",
"password": "cassandra"
},
"keyspace": "datastar",
"hosts": ["127.0.0.1"],
"contactPoints": ["127.0.0.1"],
"localDataCenter": "datacenter1"
}
}

@@ -0,7 +1,4 @@

const joi = require('joi-of-cql');
const cql = joi.cql;
var joi = require('joi-of-cql');
var cql = joi.cql;
/**

@@ -74,2 +71,1 @@ * @property schemas {Object}

};

@@ -1,2 +0,3 @@

var path = require('path'),
const
path = require('path'),
util = require('util'),

@@ -6,3 +7,3 @@ Datastar = require('../../lib'),

var model = Datastar.Model;
const model = Datastar.Model;

@@ -34,12 +35,12 @@ /*

function createKeyspace(data) {
var client = new cassandra.Client({
contactPoints: data.cassandra.hosts,
const client = new cassandra.Client({
contactPoints: data.cassandra.contactPoints,
localDataCenter: data.cassandra.localDataCenter,
authProvider: new cassandra.auth.PlainTextAuthProvider(
data.cassandra.user,
data.cassandra.password
data.cassandra.credentials.username,
data.cassandra.credentials.password
)
});
client.execute('CREATE KEYSPACE IF NOT EXISTS ' + data.cassandra.keyspace + ' WITH replication = {\'class\' : \'SimpleStrategy\', \'replication_factor\' : 1};', function (err) {
client.execute(`CREATE KEYSPACE IF NOT EXISTS ${data.cassandra.keyspace} WITH replication = {'class' : 'SimpleStrategy', 'replication_factor' : 1};`, function (err) {
if (err) return callback(err);

@@ -65,3 +66,3 @@ client.shutdown();

//
var configFile = process.env.DATASTAR_CONFIG || path.join(__dirname, '..', 'config', 'config.example.json');
const configFile = process.env.DATASTAR_CONFIG || path.join(__dirname, '..', 'config', 'config.example.json');

@@ -106,2 +107,3 @@ return createKeyspace(require(configFile));

model.before = sinon.stub();
model.waterfall = sinon.stub();
model.ensureTables = sinon.stub();

@@ -108,0 +110,0 @@

/* jshint camelcase: false */
const assume = require('assume'),
const
{ Stream } = require('stream'),
assume = require('assume'),
uuid = require('uuid'),

@@ -20,6 +22,2 @@ async = require('async'),

datastar = helpers.connectDatastar({ config: data.cassandra }, Datastar, done);
/* cassandra = new driver.Client({
contactPoints: data.cassandra.hosts,
keyspace: data.cassandra.keyspace
});*/
});

@@ -183,3 +181,3 @@ });

(result, next) => {
var res = result[0];
const res = result[0];
assume(result.length).to.equal(1);

@@ -563,5 +561,5 @@ assume(res.id).to.be.a('string');

async.parallel({
otherId: Song.findOne.bind(Song, { conditions: { otherId: ids.otherId || otherId }}),
uniqueId: Song.findOne.bind(Song, { conditions: { uniqueId: ids.uniqueId || uniqueId }}),
id: Song.findOne.bind(Song, { conditions: { id: ids.id || id }})
otherId: Song.findOne.bind(Song, { conditions: { otherId: ids.otherId || otherId } }),
uniqueId: Song.findOne.bind(Song, { conditions: { uniqueId: ids.uniqueId || uniqueId } }),
id: Song.findOne.bind(Song, { conditions: { id: ids.id || id } })
}, callback);

@@ -812,3 +810,3 @@ }

nullableId: zeros
}, ttl: 3 }, err => {
}, ttl: 1 }, err => {
assume(err).is.falsey();

@@ -826,3 +824,3 @@

});
}, 3000);
}, 1100);
});

@@ -848,3 +846,3 @@ });

});
}, 3000);
}, 100);
});

@@ -890,3 +888,3 @@ });

it('should update a record in the database with an updated reset ttl and can be found before it reaches the updated ttl', done => {
Foo.update({ entity: { fooId: seven, secondaryId: one, something: 'boo' }, ttl: 3 }, err => {
Foo.update({ entity: { fooId: seven, secondaryId: one, something: 'boo' }, ttl: 1 }, err => {
assume(err).is.falsey();

@@ -909,3 +907,3 @@

});
}, 5000);
}, 1100);
});

@@ -917,3 +915,3 @@ });

it('should update a record in the database with an updated reset ttl and expire after it reaches the updated ttl', done => {
Foo.update({ entity: { fooId: eight, secondaryId: one, something: 'boo' }, ttl: 2 }, err => {
Foo.update({ entity: { fooId: eight, secondaryId: one, something: 'boo' }, ttl: 1 }, err => {
assume(err).is.falsey();

@@ -926,3 +924,3 @@

Foo.update({ entity: { fooId: eight, secondaryId: one, something: 'foo' }, ttl: 3 }, error => {
Foo.update({ entity: { fooId: eight, secondaryId: one, something: 'foo' }, ttl: 1 }, error => {
assume(error).is.falsey();

@@ -936,3 +934,3 @@

});
}, 3000);
}, 1100);
});

@@ -992,2 +990,76 @@ });

// Integration tests for the async-iterable query interface: results can be
// consumed with `for await...of` either by passing `iterable: true` to
// `findAll` or by calling the dedicated `iterate` method.
describe('async iterable functionality', () => {
let artistId, Album;
// Milliseconds in one (non-leap) year; used to build release dates relative to now.
const YEAR = 365 * 24 * 60 * 60 * 1000;
// Seed two albums for the same artist: one roughly two years old and one
// roughly 120 days old, so the transform test below can distinguish them.
before(done => {
artistId = uuid();
Album = datastar.define('album', { schema: schemas.album });
async.auto({
tableCreated: next => Album.ensureTables(next),
// Rows are only created after the table exists (async.auto dependency).
createRows: ['tableCreated', next => {
async.parallel([
Album.create.bind(Album, {
id: uuid(),
artistId,
trackList: ['a', 'b'],
releaseDate: new Date(Date.now() - 2 * YEAR)
}),
Album.create.bind(Album, {
id: uuid(),
artistId,
trackList: ['c', 'd'],
// ~120 days ago — inside the one-year "newly released" window.
releaseDate: new Date(Date.now() - 120 * 24 * 60 * 60 * 1000)
})
], next);
}]
}, done);
});
// Drop the test table so repeated runs start clean.
after(done => {
Album.dropTables(done);
});
it('can be invoked through an `iterable` flag sent to `find`', async () => {
await testIterable(() => Album.findAll({
conditions: { artistId },
iterable: true
}));
});
it('can be invoked through an `iterate` method', async () => {
await testIterable(() => Album.iterate({ conditions: { artistId } }));
});
// Verifies that a model-level `transform` is applied to every record yielded
// by the async iterator. Only the ~120-day-old album falls within the
// one-year window, so exactly one record should be marked newly released.
// NOTE(review): `Album.transform` is assigned here and never reset — could
// leak into later tests that reuse this model. TODO confirm this is intended.
it('applies any transform function', async () => {
Album.transform = before => ({
...before,
newlyReleased: before.releaseDate.getTime() > (Date.now() - YEAR)
});
let newReleaseCount = 0;
for await (const album of Album.iterate({ conditions: { artistId } })) {
if (album.newlyReleased) {
newReleaseCount++;
}
}
assume(newReleaseCount).equals(1);
});
// Shared driver: asserts the query result is an async iterable (and not a
// legacy Stream), then consumes it and checks all seeded tracks were yielded.
async function testIterable(iterateFn) {
const iterable = iterateFn();
assume(iterable).not.instanceof(Stream);
let allTracks = [];
for await (const album of iterable) {
allTracks = allTracks.concat(album.trackList);
}
const trackSet = new Set(allTracks);
assume(trackSet.has('a')).equals(true);
assume(trackSet.has('b')).equals(true);
assume(trackSet.has('c')).equals(true);
assume(trackSet.has('d')).equals(true);
}
});
function find(Entity, id, callback) {

@@ -994,0 +1066,0 @@ Entity.findOne({

@@ -1,2 +0,2 @@

var proxyquire = require('proxyquire');
const proxyquire = require('proxyquire');

@@ -3,0 +3,0 @@ //

@@ -1,67 +0,41 @@

var through = require('through2');
var readOnly = require('read-only-stream');
/*
* function Priam (opts)
* Constructor function for the Priam mock responsible for
* mocking our communication with Cassandra.
*/
var Priam = module.exports = function Priam(opts) {
this.options = opts;
};
const { PassThrough } = require('stream');
/*
* Alias the priam connect function
*/
Priam.prototype.connect = function (keyspace, callback) {
if (!callback) {
callback = keyspace;
keyspace = null;
class Chainable {
/*
* function Chainable
* Constructor function for a mock batch
* or query.
*/
constructor() {
this.statements = [];
}
setImmediate(callback);
};
/*
* function add (statement)
* Adds the statement to this Chainable instance.
*/
add(statement) {
this.statements.push(statement);
return this;
}
/*
* function beginBatch ()
* function beginQuery ()
* Begins a new batch.
*/
Priam.prototype.beginBatch =
Priam.prototype.beginQuery = function () {
return new Chainable();
};
stream() {
const stream = new PassThrough({ objectMode: true });
stream.end();
return stream;
}
/*
* function Chainable
* Constructor function for a mock batch
* or query.
*/
function Chainable() {
this.statements = [];
*iterate() {}
/*
* function execute (callback)
* Invokes the callback in the next tick
*/
execute(callback) {
setImmediate(callback);
return this;
}
}
/*
* function add (statement)
* Adds the statement to this Chainable instance.
*/
Chainable.prototype.add = function (statement) {
this.statements.push(statement);
return this;
};
Chainable.prototype.stream = function () {
var stream = through.obj();
stream.end();
return readOnly(stream);
};
/*
* function execute (callback)
* Invokes the callback in the next tick
*/
Chainable.prototype.execute = function (callback) {
setImmediate(callback);
return this;
};
/*
* function query (cql)

@@ -75,9 +49,45 @@ * function options (obj)

Chainable.prototype.single =
Chainable.prototype.consistency =
Chainable.prototype.first =
Chainable.prototype.query =
Chainable.prototype.options =
Chainable.prototype.params = function () {
return this;
};
Chainable.prototype.consistency =
Chainable.prototype.first =
Chainable.prototype.query =
Chainable.prototype.options =
Chainable.prototype.params = function () {
return this;
};
/**
 * Mock of the `priam` Cassandra client used by the unit tests.
 * Stands in for a real driver so no database connection is required.
 */
class Priam {
  /**
   * @param {Object} opts - connection options; stored verbatim so tests
   * can inspect what the code under test passed in.
   */
  constructor(opts) {
    this.options = opts;
  }

  /**
   * Mirrors priam's `connect(keyspace, callback)` signature. The keyspace
   * argument is optional: when only one argument is supplied it is treated
   * as the callback. The callback always fires asynchronously.
   */
  connect(keyspace, callback) {
    const done = callback || keyspace;
    setImmediate(done);
  }

  /**
   * Begins a new mock query, returning a fresh Chainable to collect
   * statements and chained options.
   */
  beginQuery() {
    return new Chainable();
  }
}
/*
* function beginBatch ()
* function beginQuery ()
* Begins a new batch.
*/
Priam.prototype.beginBatch = Priam.prototype.beginQuery;
module.exports = Priam;

@@ -46,5 +46,5 @@

it('should return the validation error', () => {
it('should throw the validation error', () => {
dog.id = 'invalid guid';
assume(dog.validate()).is.instanceof(Error);
assume(() => dog.validate()).throws();
});

@@ -51,0 +51,0 @@ });

@@ -30,3 +30,3 @@

// eslint-disable-next-line
Schema('invalid', invalid);
new Schema('invalid', invalid);
}

@@ -40,3 +40,3 @@

// eslint-disable-next-line
Schema('has-dashes');
new Schema('has-dashes');
}

@@ -63,5 +63,3 @@

debug(schema.validator);
const valid = schema.validate(schema.fixKeys(entity));
assume(valid.details).is.truthy();
debug(valid);
assume(() => schema.validate(schema.fixKeys(entity))).throws();
});

@@ -68,0 +66,0 @@

@@ -29,3 +29,3 @@

assume(statement.cql).to.equal('SELECT ' + fieldList + ' FROM artist');
assume(statement.cql).to.equal(`SELECT ${fieldList} FROM artist`);
});

@@ -36,3 +36,3 @@

assume(statement.cql).to.equal('SELECT ' + fieldList + ' FROM artist ALLOW FILTERING');
assume(statement.cql).to.equal(`SELECT ${fieldList} FROM artist ALLOW FILTERING`);
});

@@ -54,7 +54,7 @@

assume(statement.cql).to.equal('SELECT ' + fieldList + ' FROM artist LIMIT 2');
assume(statement.cql).to.equal(`SELECT ${fieldList} FROM artist LIMIT 2`);
});
it('should return an error when passed conditions that get filtered (non primary keys)', () => {
const statement = builder.find({
it('should throw an error when passed conditions that get filtered (non primary keys)', () => {
assume(() => builder.find({
type: 'find',

@@ -64,5 +64,3 @@ conditions: {

}
});
assume(statement).is.an.Error;
})).throws();
});

@@ -78,3 +76,3 @@

assume(statement.cql).to.equal('SELECT ' + fieldList + ' FROM artist WHERE artist_id <= ? AND artist_id > ?');
assume(statement.cql).to.equal(`SELECT ${fieldList} FROM artist WHERE artist_id <= ? AND artist_id > ?`);
assume(statement.params.length).to.equal(2);

@@ -96,4 +94,4 @@ assume(statement.params[0].value).to.equal('2345');

it('(Find test) should return an error when passed conditions with conflicting lookup tables', () => {
const statement = b.find({
it('(Find test) should throw an error when passed conditions with conflicting lookup tables', () => {
assume(() => b.find({
type: 'single',

@@ -104,5 +102,3 @@ conditions: {

}
});
assume(statement).is.instanceof(Error);
})).throws();
});

@@ -133,7 +129,7 @@

it('should return a validation error when trying to create without all of the lookup tables', () => {
it('should throw a validation error when trying to create without all of the lookup tables', () => {
const ent = clone(entity);
delete ent.name;
const statement = b.create({}, ent);
assume(statement).is.instanceof(Error);
assume(() => b.create({}, ent)).throws();
});

@@ -201,10 +197,8 @@

it('should return an error when trying to delete with no conditions when there is a lookup table', () => {
const statement = b.remove({}, {});
assume(statement).is.instanceof(Error);
it('should throw an error when trying to delete with no conditions when there is a lookup table', () => {
assume(() => b.remove({}, {})).throws();
});
it('should return an error FOR NOW when trying to delete with insufficient conditions', () => {
const statement = b.remove({}, { id: uuid.v4() });
assume(statement).is.instanceof(Error);
it('should throw an error FOR NOW when trying to delete with insufficient conditions', () => {
assume(() => b.remove({}, { id: uuid.v4() })).throws();
});

@@ -217,10 +211,8 @@ });

describe('AlterStatement', () => {
it('should return an error when given a bad type', () => {
const statement = builder.alter({ type: 'NANANANA' });
assume(statement).is.instanceof(Error);
it('should throw an error when given a bad type', () => {
assume(() => builder.alter({ type: 'NANANANA' })).throws();
});
it('should return an error when given an unknown arg type', () => {
const statement = builder.alter({
it('should throw an error when given an unknown arg type', () => {
assume(() => builder.alter({
type: 'table',

@@ -230,5 +222,3 @@ actions: {

}
});
assume(statement).is.instanceof(Error);
})).throws();
});

@@ -240,4 +230,4 @@ });

it('build()', done => {
var schema = new Schema('artist', schemas.artist);
var builder = new StatementBuilder(schema);
const schema = new Schema('artist', schemas.artist);
const builder = new StatementBuilder(schema);
const statement = builder.table({ type: 'ensure' });

@@ -262,5 +252,5 @@ fs.readFile(path.join(fixturesDir, 'tables', 'artist.cql'), 'utf8', (err, data) => {

if (options.useIndex) {
tableName = schema.name + '_' + options.lookupKey;
tableName = `${schema.name}_${options.lookupKey}`;
} else {
tableName = schema.name + '_by_' + options.lookupKey.replace(/_\w+$/, '');
tableName = `${schema.name}_by_${options.lookupKey.replace(/_\w+$/, '')}`;
}

@@ -292,9 +282,7 @@ }

it('should return an error when given a bad key to orderBy', () => {
const statement = builder.table({
it('should throw an error when given a bad key to orderBy', () => {
assume(() => builder.table({
type: 'ensure',
orderBy: { key: 'createdAt' }
});
assume(statement).is.instanceof(Error);
})).throws();
});

@@ -365,3 +353,3 @@

// NOTE: Not a lookup table:
assume(cql.indexOf('on ' + schema.name + '(artist_id)')).is.above(-1);
assume(cql.indexOf(`on ${schema.name}(artist_id)`)).is.above(-1);
});

@@ -396,3 +384,3 @@

// NOTE: Not a lookup table:
assume(statement.cql.indexOf('on ' + schema.name + '(artist_id)')).is.above(-1);
assume(statement.cql.indexOf(`on ${schema.name}(artist_id)`)).is.above(-1);
});

@@ -411,3 +399,3 @@

assume(cql.indexOf('CREATE TABLE IF NOT EXISTS')).is.above(-1);
assume(cql.indexOf('PRIMARY KEY (' + options.lookupKey + ')')).is.above(-1);
assume(cql.indexOf(`PRIMARY KEY (${options.lookupKey})`)).is.above(-1);
});

@@ -426,3 +414,3 @@

assume(cql.indexOf('CREATE INDEX IF NOT EXISTS')).is.above(-1);
assume(cql.indexOf('on ' + schema.name + '(' + options.lookupKey + ')')).is.above(-1);
assume(cql.indexOf(`on ${schema.name}(${options.lookupKey})`)).is.above(-1);
});

@@ -442,3 +430,3 @@

assume(statement.cql.indexOf('CREATE TABLE IF NOT EXISTS')).is.above(-1);
assume(statement.cql.indexOf('PRIMARY KEY (' + options.lookupKey + ')')).is.above(-1);
assume(statement.cql.indexOf(`PRIMARY KEY (${options.lookupKey})`)).is.above(-1);
});

@@ -480,3 +468,3 @@

assume(statement.cql.indexOf('CREATE INDEX IF NOT EXISTS')).is.above(-1);
assume(statement.cql.indexOf('on ' + schema.name + '(' + options.lookupKey + ')')).is.above(-1);
assume(statement.cql.indexOf(`on ${schema.name}(${options.lookupKey})`)).is.above(-1);
});

@@ -497,11 +485,8 @@ });

it('should return an error when passed an entity without a primary key', () => {
const statement = builder.remove({}, { createDate: new Date() });
assume(statement).is.an.Error;
it('should throw an error when passed an entity without a primary key', () => {
assume(() => builder.remove({}, { createDate: new Date() })).throws();
});
it('should return an error when passed empty conditions ', () => {
const statement = builder.remove({}, {});
assume(statement).is.an.Error;
it('should throw an error when passed empty conditions ', () => {
assume(() => builder.remove({}, {})).throws();
});

@@ -573,9 +558,8 @@

it('should return an error when given an improper entity', () => {
it('should throw an error when given an improper entity', () => {
const ent = clone(entity);
delete ent.artistId;
const statement = builder.create({}, ent);
assume(statement).is.instanceof(Error);
assume(() => builder.create({}, ent)).throws();
});
});
});

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc