Comparing version 0.19.1 to 1.0.0
@@ -69,2 +69,4 @@ var async = require('async'),
if ((err = db.validateKey(key, table)) != null) return cb(err)
options = {TableName: tableName, Key: key}
@@ -76,5 +78,2 @@ if (req._projection) options._projection = req._projection
gets.push(options)
key = db.validateKey(key, table)
if (key instanceof Error) return cb(key)
}
@@ -81,0 +80,0 @@
@@ -14,3 +14,2 @@ var async = require('async'),
if (err) {
// TODO: This is a hack... fix this!
if (err.body && (/Missing the key/.test(err.body.message) || /Type mismatch for key/.test(err.body.message)))
@@ -54,15 +53,18 @@ err.body.message = 'The provided key element does not match the schema'
if ((err = db.validateItem(req.PutRequest.Item, table)) != null) return cb(err)
options.Item = req.PutRequest.Item
actions.push(putItem.bind(null, store, options))
key = db.validateItem(req.PutRequest.Item, table)
key = db.createKey(options.Item, table)
} else if (req.DeleteRequest) {
if ((err = db.validateKey(req.DeleteRequest.Key, table)) != null) return cb(err)
options.Key = req.DeleteRequest.Key
actions.push(deleteItem.bind(null, store, options))
key = db.validateKey(req.DeleteRequest.Key, table)
key = db.createKey(options.Key, table)
}
if (key instanceof Error) return cb(key)
if (seenKeys[key])
@@ -77,2 +79,1 @@ return cb(db.validationError('Provided list of item keys contains duplicates'))
}
@@ -8,5 +8,6 @@ var db = require('../db')
var key = db.validateKey(data.Key, table), itemDb = store.getItemDb(data.TableName)
if (key instanceof Error) return cb(key)
if ((err = db.validateKey(data.Key, table)) != null) return cb(err)
var itemDb = store.getItemDb(data.TableName), key = db.createKey(data.Key, table)
itemDb.lock(key, function(release) {
@@ -27,5 +28,9 @@ cb = release(cb)
itemDb.del(key, function(err) {
db.updateIndexes(store, table, existingItem, null, function(err) {
if (err) return cb(err)
cb(null, returnObj)
itemDb.del(key, function(err) {
if (err) return cb(err)
cb(null, returnObj)
})
})
@@ -32,0 +37,0 @@ })
@@ -0,1 +1,2 @@
var async = require('async')
@@ -21,2 +22,11 @@ module.exports = function deleteTable(store, data, cb) {
table.TableStatus = 'DELETING'
var deletes = [store.deleteItemDb.bind(store, key)]
;['Local', 'Global'].forEach(function(indexType) {
var indexes = table[indexType + 'SecondaryIndexes'] || []
deletes = deletes.concat(indexes.map(function(index) {
return store.deleteIndexDb.bind(store, indexType, table.TableName, index.IndexName)
}))
})
delete table.GlobalSecondaryIndexes
@@ -27,3 +37,3 @@
store.deleteItemDb(key, function(err) {
async.parallel(deletes, function(err) {
if (err) return cb(err)
@@ -30,0 +40,0 @@
@@ -8,26 +8,15 @@ var db = require('../db')
var key = db.validateKey(data.Key, table), itemDb = store.getItemDb(data.TableName)
if (key instanceof Error) return cb(key)
if ((err = db.validateKey(data.Key, table)) != null) return cb(err)
if (data._projection) {
err = db.validateKeyPaths(data._projection.nestedPaths, table)
if (err) return cb(err)
}
if ((err = db.validateKeyPaths((data._projection || {}).nestedPaths, table)) != null) return cb(err)
var itemDb = store.getItemDb(data.TableName), key = db.createKey(data.Key, table)
itemDb.get(key, function(err, item) {
if (err && err.name != 'NotFoundError') return cb(err)
var returnObj = {}
var returnObj = {}, paths = data._projection ? data._projection.paths : data.AttributesToGet
if (item) {
if (data._projection) {
returnObj.Item = db.mapPaths(data._projection.paths, item)
} else if (data.AttributesToGet) {
returnObj.Item = data.AttributesToGet.reduce(function(returnItem, attr) {
if (item[attr] != null) returnItem[attr] = item[attr]
return returnItem
}, {})
} else {
returnObj.Item = item
}
returnObj.Item = paths ? db.mapPaths(paths, item) : item
}
@@ -34,0 +23,0 @@
@@ -9,3 +9,3 @@ var once = require('once'),
if (data.ExclusiveStartTableName)
opts = {start: data.ExclusiveStartTableName + '\x00'}
opts = {gt: data.ExclusiveStartTableName}
@@ -12,0 +12,0 @@ db.lazy(store.tableDb.createKeyStream(opts), cb)
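The listTables change above swaps the old inclusive `start` bound (which needed a `'\x00'` suffix to skip past the start table itself) for levelup's exclusive `gt` option. A minimal sketch of the equivalence, assuming a levelup-style key stream (the table name is illustrative):

// old: keys >= 'Table1\x00', i.e. everything strictly after 'Table1'
tableDb.createKeyStream({start: 'Table1' + '\x00'})
// new: the same range, expressed directly as an exclusive lower bound
tableDb.createKeyStream({gt: 'Table1'})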
@@ -8,5 +8,6 @@ var db = require('../db')
var key = db.validateItem(data.Item, table), itemDb = store.getItemDb(data.TableName)
if (key instanceof Error) return cb(key)
if ((err = db.validateItem(data.Item, table)) != null) return cb(err)
var itemDb = store.getItemDb(data.TableName), key = db.createKey(data.Item, table)
itemDb.lock(key, function(release) {
@@ -27,5 +28,9 @@ cb = release(cb)
itemDb.put(key, data.Item, function(err) {
db.updateIndexes(store, table, existingItem, data.Item, function(err) {
if (err) return cb(err)
cb(null, returnObj)
itemDb.put(key, data.Item, function(err) {
if (err) return cb(err)
cb(null, returnObj)
})
})
@@ -32,0 +37,0 @@ })
@@ -1,4 +0,2 @@
var events = require('events'),
once = require('once'),
Lazy = require('lazy'),
var once = require('once'),
db = require('../db')
@@ -13,3 +11,3 @@
var keySchema = table.KeySchema, startKeyNames = keySchema.map(function(key) { return key.AttributeName }),
hashKey = startKeyNames[0], rangeKey = startKeyNames[1], projectionType, indexAttrs, isLocal
hashKey = startKeyNames[0], rangeKey = startKeyNames[1], fetchFromItemDb = false, isLocal
@@ -30,5 +28,3 @@ if (data.IndexName) {
keySchema = index.KeySchema
projectionType = index.Projection.ProjectionType
if (projectionType == 'INCLUDE')
indexAttrs = index.Projection.NonKeyAttributes
fetchFromItemDb = data.Select == 'ALL_ATTRIBUTES' && index.Projection.ProjectionType != 'ALL'
keySchema.forEach(function(key) { if (!~startKeyNames.indexOf(key.AttributeName)) startKeyNames.push(key.AttributeName) })
@@ -66,11 +62,11 @@ hashKey = keySchema[0].AttributeName
if (~['NULL', 'NOT_NULL', 'NE', 'CONTAINS', 'NOT_CONTAINS', 'IN'].indexOf(comparisonOperator)) {
return cb(db.validationError('Attempted conditional constraint is not an indexable operation'))
return db.validationError('Attempted conditional constraint is not an indexable operation')
}
if (data.KeyConditions[attr].AttributeValueList.some(function(attrVal) { return attrVal[type] == null })) {
return cb(db.validationError('One or more parameter values were invalid: Condition parameter type does not match schema type'))
return db.validationError('One or more parameter values were invalid: Condition parameter type does not match schema type')
}
if (isHash && ~['LE', 'LT', 'GE', 'GT', 'BEGINS_WITH', 'BETWEEN'].indexOf(comparisonOperator)) {
return cb(db.validationError('Query key condition not supported'))
if (isHash && comparisonOperator != 'EQ') {
return db.validationError('Query key condition not supported')
}
@@ -80,11 +76,5 @@ })
var opts = {}, tableHashKey = startKeyNames[0]
var hashType = Object.keys(data.KeyConditions[hashKey].AttributeValueList[0])[0]
var hashVal = data.KeyConditions[hashKey].AttributeValueList[0][hashType]
if (data.KeyConditions[tableHashKey] && data.KeyConditions[tableHashKey].ComparisonOperator == 'EQ') {
var tableHashType = Object.keys(data.KeyConditions[tableHashKey].AttributeValueList[0])[0]
var tableHashVal = data.KeyConditions[tableHashKey].AttributeValueList[0][tableHashType]
opts.start = db.hashPrefix(tableHashVal, tableHashType)
opts.end = db.hashPrefix(tableHashVal, tableHashType) + '~~'
}
if (data.ExclusiveStartKey) {
@@ -95,30 +85,18 @@ var tableStartKey = table.KeySchema.reduce(function(obj, attr) {
}, {})
var startKey = db.validateKey(tableStartKey, table)
if (startKey instanceof Error) {
return cb(db.validationError('The provided starting key is invalid: ' + startKey.message))
if ((err = db.validateKey(tableStartKey, table)) != null) {
return cb(db.validationError('The provided starting key is invalid: ' + err.message))
}
opts.start = startKey + '\x00'
if (Object.keys(data.KeyConditions).length == 1) {
var comparisonOperator = data.KeyConditions[hashKey].ComparisonOperator
if (comparisonOperator == 'EQ') {
var type = Object.keys(data.ExclusiveStartKey[hashKey])[0]
if (data.ExclusiveStartKey[hashKey][type] != data.KeyConditions[hashKey].AttributeValueList[0][type]) {
return cb(db.validationError('The provided starting key is outside query boundaries based on provided conditions'))
}
if (!rangeKey || !data.KeyConditions[rangeKey]) {
if (data.ExclusiveStartKey[hashKey][hashType] != hashVal) {
return cb(db.validationError('The provided starting key is outside query boundaries based on provided conditions'))
}
} else {
comparisonOperator = data.KeyConditions[rangeKey].ComparisonOperator
type = Object.keys(data.ExclusiveStartKey[rangeKey])[0]
// TODO: Need more extensive checking than this
if (comparisonOperator == 'GT' && data.ExclusiveStartKey[rangeKey][type] <= data.KeyConditions[rangeKey].AttributeValueList[0][type]) {
var matchesRange = db.compare(data.KeyConditions[rangeKey].ComparisonOperator,
data.ExclusiveStartKey[rangeKey], data.KeyConditions[rangeKey].AttributeValueList)
if (!matchesRange) {
return cb(db.validationError('The provided starting key does not match the range key predicate'))
}
comparisonOperator = data.KeyConditions[hashKey].ComparisonOperator
if (comparisonOperator == 'EQ') {
type = Object.keys(data.ExclusiveStartKey[hashKey])[0]
if (data.ExclusiveStartKey[hashKey][type] != data.KeyConditions[hashKey].AttributeValueList[0][type]) {
return cb(db.validationError('The query can return at most one row and cannot be restarted'))
}
if (data.ExclusiveStartKey[hashKey][hashType] != hashVal) {
return cb(db.validationError('The query can return at most one row and cannot be restarted'))
}
@@ -128,6 +106,3 @@ }
if (data._projection) {
err = db.validateKeyPaths(data._projection.nestedPaths, table)
if (err) return cb(err)
}
if ((err = db.validateKeyPaths((data._projection || {}).nestedPaths, table)) != null) return cb(err)
@@ -146,143 +121,57 @@ if (data.QueryFilter || data._filter) {
if (data.IndexName) {
if (data.Select == 'ALL_ATTRIBUTES' && !isLocal && projectionType != 'ALL') {
return cb(db.validationError('One or more parameter values were invalid: ' +
'Select type ALL_ATTRIBUTES is not supported for global secondary index ' +
data.IndexName + ' because its projection type is not ALL'))
}
if (indexAttrs && data.Select != 'ALL_ATTRIBUTES') {
data.AttributesToGet = indexAttrs.concat(startKeyNames)
}
if (fetchFromItemDb && !isLocal) {
return cb(db.validationError('One or more parameter values were invalid: ' +
'Select type ALL_ATTRIBUTES is not supported for global secondary index ' +
data.IndexName + ' because its projection type is not ALL'))
}
if (data._filter && (err = db.validateKeyPaths(data._filter.nestedPaths, table)) != null) {
return cb(err)
}
if ((err = db.validateKeyPaths((data._filter || {}).nestedPaths, table)) != null) return cb(err)
startKeyNames.forEach(function(attr) {
if (!data.KeyConditions[attr])
data.KeyConditions[attr] = {ComparisonOperator: 'NOT_NULL'}
})
var opts = {reverse: data.ScanIndexForward === false, limit: data.Limit ? data.Limit + 1 : -1}
var itemDb = store.getItemDb(data.TableName), vals, lastItem
opts.gte = db.hashPrefix(hashVal, hashType) + '/' + db.toRangeStr(hashVal, hashType) + '/'
opts.lt = opts.gte + '~'
// TODO: We currently don't deal nicely with indexes or reverse queries
if (data.ScanIndexForward === false || data.IndexName) {
var em = new events.EventEmitter
vals = new Lazy(em)
db.lazy(itemDb.createValueStream(), cb)
.filter(function(val) { return db.matchesFilter(val, data.KeyConditions) })
.join(function(items) {
var compareFn = db.itemCompare(rangeKey, table)
if (data.IndexName)
items.sort(compareFn)
if (data.ScanIndexForward === false)
items.reverse()
if (data.ExclusiveStartKey) {
for (var i = 0; i < items.length; i++) {
if (data.ScanIndexForward === false) {
if (compareFn(data.ExclusiveStartKey, items[i]) > 0)
break
} else if (compareFn(data.ExclusiveStartKey, items[i]) < 0) {
break
}
}
items = items.slice(i)
}
items.forEach(function(item) { em.emit('data', item) })
em.emit('end')
})
} else {
vals = db.lazy(itemDb.createValueStream(opts), cb)
if (data.KeyConditions[rangeKey]) {
var rangeStrPrefix = db.toRangeStr(data.KeyConditions[rangeKey].AttributeValueList[0])
var rangeStr = rangeStrPrefix + '/'
var comp = data.KeyConditions[rangeKey].ComparisonOperator
if (comp == 'EQ') {
opts.gte += rangeStr
opts.lte = opts.gte + '~'
delete opts.lt
} else if (comp == 'LT') {
opts.lt = opts.gte + rangeStr
} else if (comp == 'LE') {
opts.lte = opts.gte + rangeStr
delete opts.lt
} else if (comp == 'GT') {
opts.gt = opts.gte + rangeStr
delete opts.gte
} else if (comp == 'GE') {
opts.gte += rangeStr
} else if (comp == 'BEGINS_WITH') {
opts.lt = opts.gte + rangeStrPrefix + '~'
opts.gte += rangeStr
} else if (comp == 'BETWEEN') {
opts.lte = opts.gte + db.toRangeStr(data.KeyConditions[rangeKey].AttributeValueList[1]) + '/'
opts.gte += rangeStr
delete opts.lt
}
}
vals = vals.filter(function(val) {
if (!db.matchesFilter(val, data.KeyConditions)) {
if (lastItem) lastItem = null
return false
}
if (data.ExclusiveStartKey) {
var startKey = db.createIndexKey(data.ExclusiveStartKey, table, keySchema)
lastItem = val
return true
})
var size = 0, capacitySize = 0, count = 0, scannedCount = 0, limited = false
vals = vals.takeWhile(function(val) {
if (count >= data.Limit || size > 1041375) {
limited = true
return false
if (data.ScanIndexForward === false) {
opts.lt = startKey
delete opts.lte
} else {
opts.gt = startKey
delete opts.gte
}
size += db.itemSize(val, true)
count++
// TODO: Combine this with above
if (~['TOTAL', 'INDEXES'].indexOf(data.ReturnConsumedCapacity))
capacitySize += db.itemSize(val)
return true
})
if (data._filter) {
vals = vals.filter(function(val) {
scannedCount++
return db.matchesExprFilter(val, data._filter.expression)
})
} else if (data.QueryFilter) {
vals = vals.filter(function(val) {
scannedCount++
return db.matchesFilter(val, data.QueryFilter, data.ConditionalOperator)
})
}
if (data._projection) {
vals = vals.map(db.mapPaths.bind(db, data._projection.paths))
} else if (data.AttributesToGet) {
vals = vals.map(function(val) {
return data.AttributesToGet.reduce(function(item, attr) {
if (val[attr] != null) item[attr] = val[attr]
return item
}, {})
})
}
vals.join(function(items) {
var result = {Count: items.length, ScannedCount: scannedCount || items.length},
capacityUnits, tableUnits, indexUnits, indexAttr
if (limited || (data.Limit && lastItem) || size > 1041575) {
if (data.Limit) items.splice(data.Limit)
result.Count = items.length
result.ScannedCount = scannedCount || items.length
if (result.Count) {
result.LastEvaluatedKey = table.KeySchema.concat(keySchema).reduce(function(key, schemaPiece) {
key[schemaPiece.AttributeName] = items[items.length - 1][schemaPiece.AttributeName]
return key
}, {})
}
}
if (data.Select != 'COUNT') result.Items = items
if (~['TOTAL', 'INDEXES'].indexOf(data.ReturnConsumedCapacity)) {
capacityUnits = Math.ceil(capacitySize / 1024 / 4) * (data.ConsistentRead ? 1 : 0.5)
if (indexAttrs && data.Select == 'ALL_ATTRIBUTES') {
tableUnits = capacityUnits
indexUnits = Math.floor(capacityUnits / result.ScannedCount)
} else {
tableUnits = data.IndexName ? 0 : capacityUnits
indexUnits = data.IndexName ? capacityUnits : 0
}
result.ConsumedCapacity = {
CapacityUnits: tableUnits + indexUnits,
TableName: data.TableName,
}
if (data.ReturnConsumedCapacity == 'INDEXES') {
result.ConsumedCapacity.Table = {CapacityUnits: tableUnits}
indexAttr = isLocal ? 'LocalSecondaryIndexes' : 'GlobalSecondaryIndexes'
result.ConsumedCapacity[indexAttr] = {}
result.ConsumedCapacity[indexAttr][data.IndexName] = {CapacityUnits: indexUnits}
}
}
cb(null, result)
})
db.queryTable(store, table, data, opts, isLocal, fetchFromItemDb, startKeyNames, cb)
})
}
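The rewritten query path above no longer scans the whole table and filters in memory: each range-key `ComparisonOperator` is translated directly into LevelDB range bounds on top of the hash-key prefix. A rough worked example, assuming `toRangeStr` hex-encodes strings (per the `toRangeStr` addition in db/index.js below) and an illustrative base prefix:

// KeyConditions[rangeKey] = {ComparisonOperator: 'BEGINS_WITH', AttributeValueList: [{S: 'foo'}]}
// toRangeStr({S: 'foo'}) == '666f6f', so with base = hashPrefix + '/' + hashRangeStr + '/':
// opts.gte = base + '666f6f/'   // first key with this range-key prefix
// opts.lt  = base + '666f6f~'   // '~' sorts after '/' and all hex digits
// e.g. 'foobar' -> base + '666f6f626172.../' falls inside [gte, lt), as required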
@@ -11,3 +11,3 @@ var once = require('once'),
var keySchema = table.KeySchema, startKeyNames = keySchema.map(function(key) { return key.AttributeName }),
projectionType, indexAttrs, isLocal
fetchFromItemDb = false, isLocal
@@ -27,6 +27,7 @@ if (data.IndexName) {
}
if (!isLocal && data.ConsistentRead) {
return cb(db.validationError('Consistent reads are not supported on global secondary indexes'))
}
keySchema = index.KeySchema
projectionType = index.Projection.ProjectionType
if (projectionType == 'INCLUDE')
indexAttrs = index.Projection.NonKeyAttributes
fetchFromItemDb = data.Select == 'ALL_ATTRIBUTES' && index.Projection.ProjectionType != 'ALL'
keySchema.forEach(function(key) { if (!~startKeyNames.indexOf(key.AttributeName)) startKeyNames.push(key.AttributeName) })
@@ -40,28 +41,16 @@ }
if (data.IndexName) {
if (data.ExclusiveStartKey) {
err = db.traverseKey(table, keySchema, function(attr, type, isHash) {
if (!data.ExclusiveStartKey[attr]) {
return db.validationError('The provided starting key is invalid')
}
return db.validateKeyPiece(data.ExclusiveStartKey, attr, type, isHash)
})
if (err) return cb(err)
}
if (data.Select == 'ALL_ATTRIBUTES' && !isLocal && projectionType != 'ALL') {
return cb(db.validationError('One or more parameter values were invalid: ' +
'Select type ALL_ATTRIBUTES is not supported for global secondary index ' +
data.IndexName + ' because its projection type is not ALL'))
}
if (indexAttrs && data.Select != 'ALL_ATTRIBUTES') {
data.AttributesToGet = indexAttrs.concat(startKeyNames)
}
if (data.IndexName && data.ExclusiveStartKey) {
err = db.traverseKey(table, keySchema, function(attr, type, isHash) {
if (!data.ExclusiveStartKey[attr]) {
return db.validationError('The provided starting key is invalid')
}
return db.validateKeyPiece(data.ExclusiveStartKey, attr, type, isHash)
})
if (err) return cb(err)
}
var opts = {}
if (data.TotalSegments > 1) {
if (data.Segment > 0)
opts.start = ('00' + Math.ceil(4096 * data.Segment / data.TotalSegments).toString(16)).slice(-3)
opts.end = ('00' + (Math.ceil(4096 * (data.Segment + 1) / data.TotalSegments) - 1).toString(16)).slice(-3) + '~'
if (fetchFromItemDb && !isLocal) {
return cb(db.validationError('One or more parameter values were invalid: ' +
'Select type ALL_ATTRIBUTES is not supported for global secondary index ' +
data.IndexName + ' because its projection type is not ALL'))
}
@@ -74,8 +63,17 @@
}, {})
if ((err = db.validateKey(tableStartKey, table)) != null) {
return cb(db.validationError('The provided starting key is invalid: ' + err.message))
}
}
var startKey = db.validateKey(tableStartKey, table)
if (startKey instanceof Error) {
return cb(db.validationError('The provided starting key is invalid: ' + startKey.message))
}
if (data.TotalSegments > 1 && (startKey < opts.start || startKey > opts.end)) {
if (data.TotalSegments > 1) {
if (data.Segment > 0)
var hashStart = ('00' + Math.ceil(4096 * data.Segment / data.TotalSegments).toString(16)).slice(-3)
var hashEnd = ('00' + (Math.ceil(4096 * (data.Segment + 1) / data.TotalSegments) - 1).toString(16)).slice(-3) + '~'
}
if (data.ExclusiveStartKey) {
var startKey = db.createIndexKey(data.ExclusiveStartKey, table, keySchema)
if (data.TotalSegments > 1 && (startKey < hashStart || startKey > hashEnd)) {
return cb(db.validationError('The provided starting key is invalid: Invalid ExclusiveStartKey. ' +
@@ -85,69 +83,14 @@ 'Please use ExclusiveStartKey with correct Segment. ' +
}
opts.start = startKey + '\x00'
}
if (data._projection) {
err = db.validateKeyPaths(data._projection.nestedPaths, table)
if (err) return cb(err)
hashStart = startKey
}
if (data._filter && (err = db.validateKeyPaths(data._filter.nestedPaths, table)) != null) {
return cb(err)
}
if ((err = db.validateKeyPaths((data._projection || {}).nestedPaths, table)) != null) return cb(err)
var itemDb = store.getItemDb(data.TableName), vals, scannedCount = 0, size = 0, capacitySize = 0, lastItem
if ((err = db.validateKeyPaths((data._filter || {}).nestedPaths, table)) != null) return cb(err)
vals = db.lazy(itemDb.createValueStream(opts), cb)
var opts = {limit: data.Limit ? data.Limit + 1 : -1, gt: hashStart, lt: hashEnd}
vals = vals.takeWhile(function(val) {
if (scannedCount >= data.Limit || size > 1042000) return false
scannedCount++
size += db.itemSize(val, true)
// TODO: Combine this with above
if (~['TOTAL', 'INDEXES'].indexOf(data.ReturnConsumedCapacity))
capacitySize += db.itemSize(val)
lastItem = val
return true
})
if (data._filter) {
vals = vals.filter(function(val) { return db.matchesExprFilter(val, data._filter.expression) })
} else if (data.ScanFilter) {
vals = vals.filter(function(val) { return db.matchesFilter(val, data.ScanFilter, data.ConditionalOperator) })
}
if (data._projection) {
vals = vals.map(db.mapPaths.bind(db, data._projection.paths))
} else if (data.AttributesToGet) {
vals = vals.map(function(val) {
return data.AttributesToGet.reduce(function(item, attr) {
if (val[attr] != null) item[attr] = val[attr]
return item
}, {})
})
}
vals.join(function(items) {
var result = {Count: items.length, ScannedCount: scannedCount}
if (data.Select != 'COUNT') result.Items = items
if ((data.Limit && data.Limit <= scannedCount) || size > 1042000) {
result.LastEvaluatedKey = table.KeySchema.reduce(function(key, schemaPiece) {
key[schemaPiece.AttributeName] = lastItem[schemaPiece.AttributeName]
return key
}, {})
}
if (~['TOTAL', 'INDEXES'].indexOf(data.ReturnConsumedCapacity))
result.ConsumedCapacity = {
CapacityUnits: Math.ceil(capacitySize / 1024 / 4) * 0.5,
TableName: data.TableName,
Table: data.ReturnConsumedCapacity == 'INDEXES' ?
{CapacityUnits: Math.ceil(capacitySize / 1024 / 4) * 0.5} : undefined,
}
cb(null, result)
})
db.queryTable(store, table, data, opts, isLocal, fetchFromItemDb, startKeyNames, cb)
})
}
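The parallel-scan bounds above bucket keys by their first three hex digits, splitting the hash prefix space into 4096 slots. A quick worked example of the arithmetic, for TotalSegments = 4:

// Segment 0: gt undefined,        lt = '3ff~' (ceil(4096*1/4) - 1 = 1023 = 0x3ff)
// Segment 1: gt = '400' (0x400),  lt = '7ff~' (ceil(4096*2/4) - 1 = 2047 = 0x7ff)
// Segment 3: gt = 'c00' (0xc00),  lt = 'fff~' (ceil(4096*4/4) - 1 = 4095 = 0xfff)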
@@ -9,54 +9,8 @@ var Big = require('big.js'),
var key = db.validateKey(data.Key, table)
if (key instanceof Error) return cb(key)
if ((err = db.validateKey(data.Key, table)) != null) return cb(err)
if (data.AttributeUpdates || data._updates) {
err = db.traverseKey(table, function(attr) {
var hasKey = false
if (data._updates) {
var sections = data._updates.sections
for (var j = 0; j < sections.length; j++) {
if (sections[j].path[0] == attr) {
hasKey = true
break
}
}
} else {
hasKey = data.AttributeUpdates[attr] != null
}
if (hasKey) {
return db.validationError('One or more parameter values were invalid: ' +
'Cannot update attribute ' + attr + '. This attribute is part of the key')
}
})
if (err) return cb(err)
err = db.traverseIndexes(table, function(attr, type, index) {
var actualType
if (data._updates) {
var sections = data._updates.sections
for (var i = 0; i < sections.length; i++) {
var section = sections[i]
if (section.path.length == 1 && section.path[0] == attr) {
actualType = section.attrType
break
}
}
} else {
actualType = data.AttributeUpdates[attr] ? Object.keys(data.AttributeUpdates[attr].Value)[0] : null
}
if (actualType != null && actualType != type) {
return db.validationError('One or more parameter values were invalid: ' +
'Type mismatch for Index Key ' + attr + ' Expected: ' + type +
' Actual: ' + actualType + ' IndexName: ' + index.IndexName)
}
})
if (err) return cb(err)
}
if ((err = db.validateUpdates(data.AttributeUpdates, data._updates, table)) != null) return cb(err)
if (data._updates && (err = db.validateKeyPaths(data._updates.nestedPaths, table)) != null) {
return cb(err)
}
var itemDb = store.getItemDb(data.TableName), key = db.createKey(data.Key, table)
var itemDb = store.getItemDb(data.TableName)
itemDb.lock(key, function(release) {
@@ -70,3 +24,4 @@ cb = release(cb)
var returnObj = {}, item = data.Key
var returnObj = {}, item = data.Key,
paths = data._updates ? data._updates.paths : Object.keys(data.AttributeUpdates || {})
@@ -80,12 +35,3 @@ if (oldItem) {
} else if (data.ReturnValues == 'UPDATED_OLD') {
if (data._updates) {
returnObj.Attributes = db.mapPaths(data._updates.paths, oldItem)
} else {
returnObj.Attributes = {}
for (attr in data.AttributeUpdates) {
if (oldItem[attr] != null) {
returnObj.Attributes[attr] = oldItem[attr]
}
}
}
returnObj.Attributes = db.mapPaths(paths, oldItem)
}
@@ -104,12 +50,3 @@ }
} else if (data.ReturnValues == 'UPDATED_NEW') {
if (data._updates) {
returnObj.Attributes = db.mapPaths(data._updates.paths, item)
} else {
returnObj.Attributes = {}
for (attr in data.AttributeUpdates) {
if (item[attr] != null) {
returnObj.Attributes[attr] = item[attr]
}
}
}
returnObj.Attributes = db.mapPaths(paths, item)
}
@@ -119,5 +56,9 @@
itemDb.put(key, item, function(err) {
db.updateIndexes(store, table, oldItem, item, function(err) {
if (err) return cb(err)
cb(null, returnObj)
itemDb.put(key, item, function(err) {
if (err) return cb(err)
cb(null, returnObj)
})
})
@@ -124,0 +65,0 @@ })
db/index.js
var crypto = require('crypto'),
lazy = require('lazy'),
events = require('events'),
async = require('async'),
Lazy = require('lazy'),
levelup = require('levelup'),
@@ -13,10 +15,13 @@ memdown = require('memdown'),
exports.validateKey = validateKey
exports.validateItem = validateItem
exports.validateUpdates = validateUpdates
exports.validateKeyPiece = validateKeyPiece
exports.validateKeyPaths = validateKeyPaths
exports.validateItem = validateItem
exports.createKey = createKey
exports.createIndexKey = createIndexKey
exports.traverseKey = traverseKey
exports.traverseIndexes = traverseIndexes
exports.toRangeStr = toRangeStr
exports.toLexiStr = toLexiStr
exports.hashPrefix = hashPrefix
exports.itemCompare = itemCompare
exports.validationError = validationError
@@ -32,2 +37,5 @@ exports.checkConditional = checkConditional
exports.mapPath = mapPath
exports.queryTable = queryTable
exports.updateIndexes = updateIndexes
exports.getIndexActions = getIndexActions
@@ -61,2 +69,10 @@ function create(options) {
function getIndexDb(indexType, tableName, indexName) {
return getSubDb('index-' + indexType.toLowerCase() + '~' + tableName + '~' + indexName)
}
function deleteIndexDb(indexType, tableName, indexName, cb) {
deleteSubDb('index-' + indexType.toLowerCase() + '~' + tableName + '~' + indexName, cb)
}
function getSubDb(name) {
@@ -115,2 +131,4 @@ if (!subDbs[name]) {
deleteItemDb: deleteItemDb,
getIndexDb: getIndexDb,
deleteIndexDb: deleteIndexDb,
getTable: getTable,
@@ -123,3 +141,3 @@ recreate: recreate,
if (errHandler) stream.on('error', errHandler)
var streamAsLazy = lazy(stream)
var streamAsLazy = new Lazy(stream)
if (stream.destroy) streamAsLazy.on('pipe', stream.destroy.bind(stream))
@@ -134,13 +152,70 @@ return streamAsLazy
}
return traverseKey(table, keySchema, function(attr, type, isHash) {
return validateKeyPiece(dataKey, attr, type, isHash)
})
}
var keyStr, err
err = traverseKey(table, keySchema, function(attr, type, isHash) {
var err = validateKeyPiece(dataKey, attr, type, isHash)
if (err) return err
if (!keyStr) keyStr = hashPrefix(dataKey[attr][type], type)
keyStr += '~' + toLexiStr(dataKey[attr][type], type)
function validateItem(dataItem, table) {
return traverseKey(table, function(attr, type, isHash) {
if (dataItem[attr] == null) {
return validationError('One or more parameter values were invalid: ' +
'Missing the key ' + attr + ' in the item')
}
if (dataItem[attr][type] == null) {
return validationError('One or more parameter values were invalid: ' +
'Type mismatch for key ' + attr + ' expected: ' + type +
' actual: ' + Object.keys(dataItem[attr])[0])
}
return checkKeySize(dataItem[attr][type], type, isHash)
}) || traverseIndexes(table, function(attr, type, index) {
if (dataItem[attr] != null && dataItem[attr][type] == null) {
return validationError('One or more parameter values were invalid: ' +
'Type mismatch for Index Key ' + attr + ' Expected: ' + type +
' Actual: ' + Object.keys(dataItem[attr])[0] + ' IndexName: ' + index.IndexName)
}
})
return err || keyStr
}
function validateUpdates(attributeUpdates, expressionUpdates, table) {
if (attributeUpdates == null && expressionUpdates == null) return
return traverseKey(table, function(attr) {
var hasKey = false
if (expressionUpdates) {
var sections = expressionUpdates.sections
for (var j = 0; j < sections.length; j++) {
if (sections[j].path[0] == attr) {
hasKey = true
break
}
}
} else {
hasKey = attributeUpdates[attr] != null
}
if (hasKey) {
return validationError('One or more parameter values were invalid: ' +
'Cannot update attribute ' + attr + '. This attribute is part of the key')
}
}) || traverseIndexes(table, function(attr, type, index) {
var actualType
if (expressionUpdates) {
var sections = expressionUpdates.sections
for (var i = 0; i < sections.length; i++) {
var section = sections[i]
if (section.path.length == 1 && section.path[0] == attr) {
actualType = section.attrType
break
}
}
} else {
actualType = attributeUpdates[attr] ? Object.keys(attributeUpdates[attr].Value)[0] : null
}
if (actualType != null && actualType != type) {
return validationError('One or more parameter values were invalid: ' +
'Type mismatch for Index Key ' + attr + ' Expected: ' + type +
' Actual: ' + actualType + ' IndexName: ' + index.IndexName)
}
}) || validateKeyPaths((expressionUpdates || {}).nestedPaths, table)
}
function validateKeyPiece(key, attr, type, isHash) {
@@ -154,2 +229,3 @@ if (key[attr] == null || key[attr][type] == null) {
function validateKeyPaths(nestedPaths, table) {
if (!nestedPaths) return
return traverseKey(table, function(attr) {
@@ -168,30 +244,18 @@ if (nestedPaths[attr]) {
function validateItem(dataItem, table) {
var keyStr, err
err = traverseKey(table, function(attr, type, isHash) {
if (dataItem[attr] == null) {
return validationError('One or more parameter values were invalid: ' +
'Missing the key ' + attr + ' in the item')
}
if (dataItem[attr][type] == null) {
return validationError('One or more parameter values were invalid: ' +
'Type mismatch for key ' + attr + ' expected: ' + type +
' actual: ' + Object.keys(dataItem[attr])[0])
}
err = checkKeySize(dataItem[attr][type], type, isHash)
if (err) return err
if (!keyStr) keyStr = hashPrefix(dataItem[attr][type], type)
keyStr += '~' + toLexiStr(dataItem[attr][type], type)
function createKey(item, table, keySchema) {
if (keySchema == null) keySchema = table.KeySchema
var keyStr
traverseKey(table, keySchema, function(attr, type, isHash) {
if (isHash) keyStr = hashPrefix(item[attr][type], type) + '/'
keyStr += toRangeStr(item[attr][type], type) + '/'
})
if (err) return err
err = traverseIndexes(table, function(attr, type, index) {
if (dataItem[attr] != null && dataItem[attr][type] == null) {
return validationError('One or more parameter values were invalid: ' +
'Type mismatch for Index Key ' + attr + ' Expected: ' + type +
' Actual: ' + Object.keys(dataItem[attr])[0] + ' IndexName: ' + index.IndexName)
}
})
return err || keyStr
return keyStr
}
function createIndexKey(item, table, keySchema) {
var tableKeyPieces = []
traverseKey(table, function(attr, type) { tableKeyPieces.push(item[attr][type], type) })
return createKey(item, table, keySchema) + hashPrefix.apply(this, tableKeyPieces)
}
function traverseKey(table, keySchema, visitKey) {
@@ -254,2 +318,11 @@ if (typeof keySchema == 'function') { visitKey = keySchema; keySchema = table.KeySchema }
function toRangeStr(keyPiece, type) {
if (type == null) {
type = Object.keys(keyPiece)[0]
keyPiece = keyPiece[type]
}
if (type == 'S') return new Buffer(keyPiece, 'utf8').toString('hex')
return toLexiStr(keyPiece, type)
}
// Creates lexicographically sortable number strings
@@ -267,2 +340,6 @@ // 0 7c 009 = '07c009' = -99.1
if (keyPiece == null) return ''
if (type == null) {
type = Object.keys(keyPiece)[0]
keyPiece = keyPiece[type]
}
if (type == 'B') return new Buffer(keyPiece, 'base64').toString('hex')
@@ -346,29 +423,2 @@ if (type != 'N') return keyPiece
function itemCompare(rangeKey, table) {
return function(item1, item2) {
var val1, val2, rangeType, tableHashKey, tableRangeKey, tableHashType, tableRangeType,
hashVal1, rangeVal1, hashVal2, rangeVal2
if (rangeKey) {
rangeType = Object.keys(item1[rangeKey] || item2[rangeKey] || {})[0]
rangeVal1 = (item1[rangeKey] || {})[rangeType]
rangeVal2 = (item2[rangeKey] || {})[rangeType]
val1 = toLexiStr(rangeVal1, rangeType)
val2 = toLexiStr(rangeVal2, rangeType)
}
if (!rangeKey || val1 == val2) {
tableHashKey = table.KeySchema[0].AttributeName
tableRangeKey = (table.KeySchema[1] || {}).AttributeName
tableHashType = Object.keys(item1[tableHashKey] || item2[tableHashKey] || {})[0]
tableRangeType = Object.keys(item1[tableRangeKey] || item2[tableRangeKey] || {})[0]
hashVal1 = item1[tableHashKey][tableHashType]
rangeVal1 = (item1[tableRangeKey] || {})[tableRangeType]
hashVal2 = item2[tableHashKey][tableHashType]
rangeVal2 = (item2[tableRangeKey] || {})[tableRangeType]
val1 = hashPrefix(hashVal1, tableHashType, rangeVal1, tableRangeType)
val2 = hashPrefix(hashVal2, tableHashType, rangeVal2, tableRangeType)
}
return val1 < val2 ? -1 : val1 > val2 ? 1 : 0
}
}
function checkConditional(data, existingItem) {
@@ -411,13 +461,49 @@ existingItem = existingItem || {}
function itemSize(item, skipAttr) {
return Object.keys(item).reduce(function(sum, attr) {
return sum + (skipAttr ? 2 : attr.length) + valSize(item[attr], skipAttr)
function itemSize(item, compress, addMetaSize, rangeKey) {
// Size of compressed item (for checking query/scan limit) seems complicated,
// probably due to some internal serialization format.
var rangeKeySize = 0
var size = Object.keys(item).reduce(function(sum, attr) {
var size = valSizeWithStorage(item[attr], compress && attr != rangeKey)
if (compress && attr == rangeKey) {
rangeKeySize = size
return sum
}
return sum + size + (compress ? 1 : attr.length)
}, 0)
return !addMetaSize ? size : 2 + size + ((1 + Math.floor((1 + size) / 3072)) * (18 + rangeKeySize))
}
function valSize(itemAttr, skipAttr) {
function valSizeWithStorage(itemAttr, compress) {
var type = Object.keys(itemAttr)[0]
var val = itemAttr[type]
var size = valSize(val, type, compress)
if (!compress) return size
switch (type) {
case 'S':
return size + (size < 128 ? 1 : size < 16384 ? 2 : 3)
case 'B':
return size + 1
case 'N':
return size + 1
case 'SS':
return size + val.length + 1
case 'BS':
return size + val.length + 1
case 'NS':
return size + val.length + 1
case 'NULL':
return 0
case 'BOOL':
return 1
case 'L':
return size
case 'M':
return size
}
}
function valSize(val, type, compress) {
switch (type) {
case 'S':
return val.length
@@ -428,12 +514,11 @@ case 'B':
val = new Big(val)
return Math.ceil(val.c.length / 2) + (val.e % 2 ? 1 : 2)
var numDigits = val.c.length
if (numDigits == 1 && val.c[0] === 0) return 1
return 1 + Math.ceil(numDigits / 2) + (numDigits % 2 || val.e % 2 ? 0 : 1) + (val.s == -1 ? 1 : 0)
case 'SS':
return val.reduce(function(sum, x) { return sum + x.length }, skipAttr ? val.length : 0) // eslint-disable-line no-loop-func
return val.reduce(function(sum, x) { return sum + valSize(x, 'S') }, 0) // eslint-disable-line no-loop-func
case 'BS':
return val.reduce(function(sum, x) { return sum + new Buffer(x, 'base64').length }, skipAttr ? val.length : 0) // eslint-disable-line no-loop-func
return val.reduce(function(sum, x) { return sum + valSize(x, 'B') }, 0) // eslint-disable-line no-loop-func
case 'NS':
return val.reduce(function(sum, x) { // eslint-disable-line no-loop-func
x = new Big(x)
return sum + Math.ceil(x.c.length / 2) + (x.e % 2 ? 1 : 2)
}, skipAttr ? val.length : 0)
return val.reduce(function(sum, x) { return sum + valSize(x, 'N') }, 0) // eslint-disable-line no-loop-func
case 'NULL':
@@ -444,5 +529,5 @@ return 1
case 'L':
return 3 + val.reduce(function(sum, val) { return sum + 1 + valSize(val, skipAttr) }, 0)
return 3 + val.reduce(function(sum, val) { return sum + 1 + valSizeWithStorage(val, compress) }, 0)
case 'M':
return 3 + Object.keys(val).length + itemSize(val, skipAttr)
return 3 + Object.keys(val).length + itemSize(val, compress)
}
@@ -645,2 +730,3 @@ }
var path = paths[i]
if (!Array.isArray(path)) path = [path]
var resolved = mapPath(path, item)
@@ -650,4 +736,8 @@ if (resolved == null) {
}
if (path.length == 1) {
returnItem[path[0]] = resolved
continue
}
var curItem = {M: returnItem}
for (var j = 0; j < paths[i].length; j++) {
for (var j = 0; j < path.length; j++) {
var piece = path[j]
@@ -681,2 +771,5 @@ if (typeof piece == 'number') {
function mapPath(path, item) {
if (path.length == 1) {
return item[path[0]]
}
var resolved = {M: item}
@@ -698,1 +791,159 @@ for (var i = 0; i < path.length; i++) {
}
function queryTable(store, table, data, opts, isLocal, fetchFromItemDb, startKeyNames, cb) {
var itemDb = store.getItemDb(data.TableName), vals
if (data.IndexName) {
var indexDb = store.getIndexDb(isLocal ? 'local' : 'global', data.TableName, data.IndexName)
vals = lazyStream(indexDb.createValueStream(opts), cb)
} else {
vals = lazyStream(itemDb.createValueStream(opts), cb)
}
var tableCapacity = 0, indexCapacity = 0,
calculateCapacity = ~['TOTAL', 'INDEXES'].indexOf(data.ReturnConsumedCapacity)
if (fetchFromItemDb) {
var em = new events.EventEmitter
var queue = async.queue(function(key, cb) {
if (!key) {
em.emit('end')
return cb()
}
itemDb.get(key, function(err, item) {
if (err) {
em.emit('error', err)
return cb(err)
}
if (calculateCapacity) tableCapacity += itemSize(item)
em.emit('data', item)
cb()
})
})
var oldVals = vals
vals = new Lazy(em)
oldVals.map(function(item) {
if (calculateCapacity) indexCapacity += itemSize(item)
queue.push(createKey(item, table))
}).once('pipe', queue.push.bind(queue, ''))
}
var size = 0, count = 0, rangeKey = table.KeySchema[1] && table.KeySchema[1].AttributeName
vals = vals.takeWhile(function(val) {
if (count >= data.Limit || size >= 1024 * 1024) {
return false
}
if (calculateCapacity && !fetchFromItemDb) {
var capacitySize = itemSize(val)
if (data.IndexName) {
indexCapacity += capacitySize
} else {
tableCapacity += capacitySize
}
}
count++
size += itemSize(val, true, true, rangeKey)
return true
})
var queryFilter = data.QueryFilter || data.ScanFilter
if (data._filter) {
vals = vals.filter(function(val) { return matchesExprFilter(val, data._filter.expression) })
} else if (queryFilter) {
vals = vals.filter(function(val) { return matchesFilter(val, queryFilter, data.ConditionalOperator) })
}
var paths = data._projection ? data._projection.paths : data.AttributesToGet
if (paths) {
vals = vals.map(mapPaths.bind(this, paths))
}
vals.join(function(items) {
var result = {ScannedCount: count}
if (count >= data.Limit || size >= 1024 * 1024) {
if (data.Limit) items.splice(data.Limit)
if (items.length) {
result.LastEvaluatedKey = startKeyNames.reduce(function(key, attr) {
key[attr] = items[items.length - 1][attr]
return key
}, {})
}
}
result.Count = items.length
if (data.Select != 'COUNT') result.Items = items
if (calculateCapacity) {
var tableUnits = Math.ceil(tableCapacity / 1024 / 4) * (data.ConsistentRead ? 1 : 0.5)
var indexUnits = Math.ceil(indexCapacity / 1024 / 4) * (data.ConsistentRead ? 1 : 0.5)
result.ConsumedCapacity = {
CapacityUnits: tableUnits + indexUnits,
TableName: data.TableName,
}
if (data.ReturnConsumedCapacity == 'INDEXES') {
result.ConsumedCapacity.Table = {CapacityUnits: tableUnits}
if (data.IndexName) {
var indexAttr = isLocal ? 'LocalSecondaryIndexes' : 'GlobalSecondaryIndexes'
result.ConsumedCapacity[indexAttr] = {}
result.ConsumedCapacity[indexAttr][data.IndexName] = {CapacityUnits: indexUnits}
}
}
}
cb(null, result)
})
}
function updateIndexes(store, table, existingItem, item, cb) {
if (!existingItem && !item) return cb()
var puts = [], deletes = []
;['Local', 'Global'].forEach(function(indexType) {
var indexes = table[indexType + 'SecondaryIndexes'] || []
var actions = getIndexActions(indexes, existingItem, item, table)
puts = puts.concat(actions.puts.map(function(action) {
var indexDb = store.getIndexDb(indexType, table.TableName, action.index)
return indexDb.put.bind(indexDb, action.key, action.item)
}))
deletes = deletes.concat(actions.deletes.map(function(action) {
var indexDb = store.getIndexDb(indexType, table.TableName, action.index)
return indexDb.del.bind(indexDb, action.key)
}))
})
async.parallel(deletes, function(err) {
if (err) return cb(err)
async.parallel(puts, cb)
})
}
function getIndexActions(indexes, existingItem, item, table) {
var puts = [], deletes = [], tableKeys = table.KeySchema.map(function(key) { return key.AttributeName })
indexes.forEach(function(index) {
var indexKeys = index.KeySchema.map(function(key) { return key.AttributeName }), key = null
if (item && indexKeys.every(function(key) { return item[key] != null })) {
if (index.Projection.ProjectionType != 'ALL') {
var indexAttrs = indexKeys.concat(tableKeys, index.Projection.NonKeyAttributes || [])
item = indexAttrs.reduce(function(obj, attr) {
obj[attr] = item[attr]
return obj
}, Object.create(null))
}
key = createIndexKey(item, table, index.KeySchema)
puts.push({index: index.IndexName, key: key, item: item})
}
if (existingItem && indexKeys.every(function(key) { return existingItem[key] != null })) {
var existingKey = createIndexKey(existingItem, table, index.KeySchema)
if (existingKey != key) {
deletes.push({index: index.IndexName, key: existingKey})
}
}
})
return {puts: puts, deletes: deletes}
}
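The capacity accounting in `queryTable` above follows DynamoDB's read model: the aggregate size of matched items is rounded up to 4KB read units, halved for eventually consistent reads. A quick worked example of the arithmetic:

// 10,000 bytes of matched items:
// Math.ceil(10000 / 1024 / 4) = 3   // 3 units with ConsistentRead: true
// 3 * 0.5 = 1.5                     // 1.5 units eventually consistent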
{
"name": "dynalite",
"version": "0.19.1",
"version": "1.0.0",
"description": "An implementation of Amazon's DynamoDB built on LevelDB",
@@ -5,0 +5,0 @@ "main": "index.js",
@@ -12,7 +12,22 @@ dynalite
NB: Schema changes in v1.x
--------------------------
If you've been using v0.x with a saved path on your filesystem, you should note
that the schema has been changed to separate out indexes. This means that if
you have tables with indexes on the old schema, you'll need to update them –
this should just be a matter of getting each item and writing it again – a
Scan/BatchWriteItem loop should suffice to populate the indexes correctly.
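For example, a minimal migration sketch along those lines, assuming the `aws-sdk` client and a local dynalite instance (the endpoint, credentials and table name are placeholders):

```js
var AWS = require('aws-sdk')

var dynamo = new AWS.DynamoDB({
  endpoint: 'http://localhost:4567', // wherever dynalite is listening
  region: 'us-east-1',
  accessKeyId: 'akid',
  secretAccessKey: 'secret',
})

// Scan each page of the table and write every item straight back,
// which populates the new index databases as a side effect
function rewriteAll(tableName, startKey, cb) {
  var params = {TableName: tableName}
  if (startKey) params.ExclusiveStartKey = startKey
  dynamo.scan(params, function(err, data) {
    if (err) return cb(err)
    writeChunk(data.Items, function(err) {
      if (err) return cb(err)
      if (data.LastEvaluatedKey) return rewriteAll(tableName, data.LastEvaluatedKey, cb)
      cb()
    })
  })

  // BatchWriteItem accepts at most 25 requests per call
  // (UnprocessedItems handling is elided for brevity)
  function writeChunk(items, cb) {
    if (!items.length) return cb()
    var requestItems = {}
    requestItems[tableName] = items.slice(0, 25).map(function(item) {
      return {PutRequest: {Item: item}}
    })
    dynamo.batchWriteItem({RequestItems: requestItems}, function(err) {
      if (err) return cb(err)
      writeChunk(items.slice(25), cb)
    })
  }
}

rewriteAll('my-table', null, function(err) {
  if (err) throw err
  console.log('indexes repopulated')
})
```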
Why not Amazon's DynamoDB Local?
--------------------------------
Because it's too buggy! And it differs too much from the live instances in a number of key areas
([see below](#problems-with-amazons-dynamodb-local))
Good question! These days it's actually pretty good, and considering it's now probably
used by countless AWS devs, it'll probably be well supported going forward. Unless you
specifically can't, or don't want to, use Java, or you're having problems with it,
you'll probably be better off sticking with it! Originally, however, DynamoDB Local
didn't exist, and when it did, differed a lot from the live instances in ways that caused
my company issues. Most of those issues have been addressed in time, but DynamoDB Local
does still differ in a number of ways from the live DynamoDB instances –
([see below](#problems-with-amazons-dynamodb-local)) for details.
@@ -79,51 +94,29 @@ Example
- Implement DynamoDB Streams
- Implement `ReturnItemCollectionMetrics` on all remaining endpoints
- Implement size info for tables and indexes
- Add ProvisionedThroughput checking
- See [open issues](https://github.com/mhart/dynalite/issues) for an up-to-date list of outstanding features
- See [open issues on GitHub](https://github.com/mhart/dynalite/issues) for any further TODOs
Problems with Amazon's DynamoDB Local
Problems with Amazon's DynamoDB Local (UPDATED 2016-04-19)
-------------------------------------
Part of the reason I wrote dynalite was due to the existing mock libraries not exhibiting the same behaviour as the
live instances. Amazon released their DynamoDB Local Java tool recently, but the current version (2013-09-12) still
has quite a number of issues that have prevented us (at [Adslot](http://adslot.com/)) from testing our production code,
especially in a manner that simulates actual behaviour on the live instances.
live instances. Amazon then released their DynamoDB Local Java, but the early versions were still very different.
The latest version I checked (2016-04-19) is much better, but still has a few differences.
Some of these are documented (eg, no `ConsumedCapacity` returned), but most aren't -
the items below are a rough list of the issues we've found (and do not exist in dynalite), vaguely in order of importance:
[Some of these are documented](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Tools.DynamoDBLocal.html#Tools.DynamoDBLocal.Differences),
but most aren't - the items below are a rough list of the issues found, vaguely in order of importance:
- Returns 400 when `UpdateItem` uses the default `PUT` `Action` without explicitly specifying it
(this actually prevents certain client libraries from being used at all)
- Does not return correct number of `UnprocessedKeys` in `BatchGet` (returns one less!)
- Returns 400 when trying to put valid numbers with less than 38 significant digits, eg 1e40
- Returns 200 for duplicated keys in `BatchGetItem`
- Returns 200 when hash key is too big in `GetItem`/`BatchGetItem`
- Returns 200 when range key is too big in `GetItem`/`BatchGetItem`
- Returns 200 for `PutItem`/`GetItem`/`UpdateItem`/`BatchGetItem`/`Scan`/etc with empty strings (eg, `{a: {S: ''}}`)
- Returns 413 when request is over 1MB (eg, in a `BatchWrite` with 25 items of 64k), but live instances allow 8MB
- Returns `ResourceNotFoundException` in `ListTables` if `ExclusiveStartName` no longer exists
- Does not return `ConsistentRead` property in `UnprocessedKeys` in `BatchGet` even if requested
- Returns 200 for empty `RequestItems` in `BatchGetItem`/`BatchWriteItem`
- Returns 200 when trying to delete `NS` from `SS` or `NS` from `N` or add `NS` to `SS` or `N` to `NS`
- Allows `UpdateTable` when read and write capacity are same as current (should be an error)
- Tables are created in `ACTIVE` state, not `CREATING` state
- Tables are removed without going into a `DELETING` state
- Tables are updated without going into an `UPDATING` state
- `PutItem` returns `Attributes` by default, even though none are requested
- Does not add `ProvisionedThroughput.LastIncreaseDateTime` in `UpdateTable`
- Does not update `ProvisionedThroughput.NumberOfDecreasesToday` in `UpdateTable`
- Does not return nested attributes correctly for `UpdateItem`
- Does not calculate size limits accurately for `BatchGetItem`/`Query`/`Scan` result sets
- Does not deal with `ALL_ATTRIBUTES` correctly for global index on `Query`/`Scan`
- Does not prevent primary keys in `QueryFilter` and `FilterExpression` for `Query`
- Does not detect duplicate values in `AttributesToGet`
- Does not return `LastEvaluatedKey` when size just over limit for `Query`/`Scan`
- Does not return `ConsistentRead` property in `UnprocessedKeys` in `BatchGetItem` even if requested
- Doesn't return `ConsumedCapacity` (documented - but makes it very hard to calculate expected usage)
- Often returns 500 instead of 400 (or similarly appropriate status)
- Different serialization and validation error messages from live instances (makes it hard to debug)
- Uses uppercase `Message` for error messages (should only use uppercase for `SerializationException`)
- Often returns 500 instead of 400 (or similarly appropriate status)
- Doesn't return `ConsumedCapacity` (documented - but makes it very hard to calculate expected usage)
- Does not calculate the `Scan` size limits correctly so can return too many items
- Does not return `LastEvaluatedKey` on a `Query` when at end of table
- Does not return `LastEvaluatedKey` on a `Scan` when `Limit` equals number of matching items
- Does not return `X-Amzn-RequestId` header
- Does not return `X-Amz-Crc32` header
- Does not return `application/json` if `application/json` is requested
- Fails to put numbers 1e-130 and -1e-130 (succeeds on live instances)
- Returns an error when calling `Query` on a hash table (succeeds on live instances)
- Returns 500 if random attributes are supplied (just ignored on live instances)
- Does not convert doubles to booleans (returns 500 instead)
- Does not return `Query`/`Scan` items in same order when using hash key or hash `GlobalSecondaryIndex` (shouldn't rely on this anyway)
@@ -1,4 +0,2 @@
var validateAttributeValue = require('./index').validateAttributeValue,
validateExpressionParams = require('./index').validateExpressionParams,
validateExpressions = require('./index').validateExpressions,
var validations = require('./index'),
db = require('../db')
@@ -53,21 +51,21 @@
exports.custom = function(data) {
var numReqs = 0, table, i, key, msg, attrs, tableData, seenKeys
var numReqs = 0
for (table in data.RequestItems) {
tableData = data.RequestItems[table]
for (var table in data.RequestItems) {
var tableData = data.RequestItems[table]
msg = validateExpressionParams(tableData, ['ProjectionExpression'], ['AttributesToGet'])
var msg = validations.validateExpressionParams(tableData, ['ProjectionExpression'], ['AttributesToGet'])
if (msg) return msg
seenKeys = {}
for (i = 0; i < tableData.Keys.length; i++) {
for (key in tableData.Keys[i]) {
msg = validateAttributeValue(tableData.Keys[i][key])
var seenKeys = Object.create(null)
for (var i = 0; i < tableData.Keys.length; i++) {
var key = tableData.Keys[i]
for (var attr in key) {
msg = validations.validateAttributeValue(key[attr])
if (msg) return msg
}
// TODO: this is unnecessarily expensive
var keyStr = Object.keys(tableData.Keys[i]).sort().reduce(function(str, key) {
var type = Object.keys(tableData.Keys[i][key])[0]
return str + '~' + db.toLexiStr(tableData.Keys[i][key][type], type)
}, '')
var keyStr = Object.keys(key).sort().map(function(attr) { return db.toRangeStr(key[attr]) }).join('/')
if (seenKeys[keyStr])
@@ -83,14 +81,9 @@ return 'Provided list of item keys contains duplicates'
if (tableData.AttributesToGet) {
attrs = Object.create(null)
for (i = 0; i < tableData.AttributesToGet.length; i++) {
if (attrs[tableData.AttributesToGet[i]])
return 'One or more parameter values were invalid: Duplicate value in attribute name: ' +
tableData.AttributesToGet[i]
attrs[tableData.AttributesToGet[i]] = true
}
msg = validations.findDuplicate(tableData.AttributesToGet)
if (msg) return 'One or more parameter values were invalid: Duplicate value in attribute name: ' + msg
}
msg = validateExpressions(tableData)
msg = validations.validateExpressions(tableData)
if (msg) return msg
}
}
@@ -1,3 +0,3 @@
var db = require('../db'),
validateAttributeValue = require('./index').validateAttributeValue
var validations = require('./index'),
db = require('../db')
@@ -67,3 +67,3 @@ exports.types = {
for (key in request.PutRequest.Item) {
msg = validateAttributeValue(request.PutRequest.Item[key])
msg = validations.validateAttributeValue(request.PutRequest.Item[key])
if (msg) return msg
@@ -75,3 +75,3 @@ }
for (key in request.DeleteRequest.Key) {
msg = validateAttributeValue(request.DeleteRequest.Key[key])
msg = validations.validateAttributeValue(request.DeleteRequest.Key[key])
if (msg) return msg
@@ -78,0 +78,0 @@ }
@@ -1,5 +0,2 @@
var validateAttributeValue = require('./index').validateAttributeValue,
validateAttributeConditions = require('./index').validateAttributeConditions,
validateExpressionParams = require('./index').validateExpressionParams,
validateExpressions = require('./index').validateExpressions
var validations = require('./index')
@@ -68,15 +65,15 @@ exports.types = {
var msg = validateExpressionParams(data, ['ConditionExpression'], ['Expected'])
var msg = validations.validateExpressionParams(data, ['ConditionExpression'], ['Expected'])
if (msg) return msg
for (var key in data.Key) {
msg = validateAttributeValue(data.Key[key])
msg = validations.validateAttributeValue(data.Key[key])
if (msg) return msg
}
msg = validateAttributeConditions(data)
msg = validations.validateAttributeConditions(data)
if (msg) return msg
msg = validateExpressions(data)
msg = validations.validateExpressions(data)
if (msg) return msg
}
@@ -1,4 +0,2 @@
-var validateAttributeValue = require('./index').validateAttributeValue,
-    validateExpressionParams = require('./index').validateExpressionParams,
-    validateExpressions = require('./index').validateExpressions
+var validations = require('./index')
@@ -40,20 +38,15 @@ exports.types = {
-  var msg = validateExpressionParams(data, ['ProjectionExpression'], ['AttributesToGet'])
+  var msg = validations.validateExpressionParams(data, ['ProjectionExpression'], ['AttributesToGet'])
   if (msg) return msg
   for (var key in data.Key) {
-    msg = validateAttributeValue(data.Key[key])
+    msg = validations.validateAttributeValue(data.Key[key])
     if (msg) return msg
   }
   if (data.AttributesToGet) {
-    var attrs = Object.create(null)
-    for (var i = 0; i < data.AttributesToGet.length; i++) {
-      if (attrs[data.AttributesToGet[i]])
-        return 'One or more parameter values were invalid: Duplicate value in attribute name: ' +
-          data.AttributesToGet[i]
-      attrs[data.AttributesToGet[i]] = true
-    }
+    msg = validations.findDuplicate(data.AttributesToGet)
+    if (msg) return 'One or more parameter values were invalid: Duplicate value in attribute name: ' + msg
   }
-  msg = validateExpressions(data)
+  msg = validations.validateExpressions(data)
   if (msg) return msg
 }
@@ -10,2 +10,3 @@ var Big = require('big.js'),
 exports.toLowerFirst = toLowerFirst
+exports.findDuplicate = findDuplicate
 exports.validateAttributeValue = validateAttributeValue
@@ -380,9 +381,9 @@ exports.validateConditions = validateConditions
-  if (type == 'SS' && hasDuplicates(value[type]))
+  if (type == 'SS' && findDuplicate(value[type]))
     return 'One or more parameter values were invalid: Input collection ' + valueStr(value[type]) + ' contains duplicates.'
-  if (type == 'NS' && hasDuplicates(value[type]))
+  if (type == 'NS' && findDuplicate(value[type]))
     return 'Input collection contains duplicates'
-  if (type == 'BS' && hasDuplicates(value[type]))
+  if (type == 'BS' && findDuplicate(value[type]))
     return 'One or more parameter values were invalid: Input collection ' + valueStr(value[type]) + ' of type BS contains duplicates.'
@@ -434,9 +435,9 @@
-function hasDuplicates(array) {
-  var setObj = {}
-  return array.some(function(val) {
-    if (setObj[val]) return true
-    setObj[val] = true
-    return false
-  })
+function findDuplicate(arr) {
+  if (!arr) return null
+  var vals = Object.create(null)
+  for (var i = 0; i < arr.length; i++) {
+    if (vals[arr[i]]) return arr[i]
+    vals[arr[i]] = true
+  }
 }
@@ -443,0 +444,0 @@
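
The helper that replaces hasDuplicates returns the first duplicated element rather than a boolean, which lets call sites splice the offending value into their error messages, and it tolerates a missing array by returning null. One caveat worth noting: because the seen-set is a plain object, values are compared by their string form, so 1 and '1' count as duplicates. Copied from the hunk above, with example call sites:

function findDuplicate(arr) {
  if (!arr) return null
  var vals = Object.create(null)
  for (var i = 0; i < arr.length; i++) {
    if (vals[arr[i]]) return arr[i] // first repeated element
    vals[arr[i]] = true
  }
}

console.log(findDuplicate(['a', 'b', 'a'])) // 'a', usable in the error message
console.log(findDuplicate(['a', 'b']))      // undefined, falsy so no error
console.log(findDuplicate(null))            // null, tolerated without a TypeError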
@@ -1,6 +0,3 @@
-var db = require('../db'),
-    validateAttributeValue = require('./index').validateAttributeValue,
-    validateAttributeConditions = require('./index').validateAttributeConditions,
-    validateExpressionParams = require('./index').validateExpressionParams,
-    validateExpressions = require('./index').validateExpressions
+var validations = require('./index'),
+    db = require('../db')
@@ -69,7 +66,7 @@ exports.types = {
-  var msg = validateExpressionParams(data, ['ConditionExpression'], ['Expected'])
+  var msg = validations.validateExpressionParams(data, ['ConditionExpression'], ['Expected'])
   if (msg) return msg
   for (var key in data.Item) {
-    msg = validateAttributeValue(data.Item[key])
+    msg = validations.validateAttributeValue(data.Item[key])
    if (msg) return msg
@@ -84,7 +81,7 @@ }
-  msg = validateAttributeConditions(data)
+  msg = validations.validateAttributeConditions(data)
   if (msg) return msg
-  msg = validateExpressions(data)
+  msg = validations.validateExpressions(data)
   if (msg) return msg
 }
@@ -1,6 +0,2 @@
-var validateAttributeValue = require('./index').validateAttributeValue,
-    validateExpressionParams = require('./index').validateExpressionParams,
-    validateExpressions = require('./index').validateExpressions,
-    convertKeyCondition = require('./index').convertKeyCondition,
-    validateConditions = require('./index').validateConditions
+var validations = require('./index')
@@ -104,3 +100,3 @@ exports.types = {
-  var msg = validateExpressionParams(data,
+  var msg = validations.validateExpressionParams(data,
     ['ProjectionExpression', 'FilterExpression', 'KeyConditionExpression'],
@@ -111,17 +107,12 @@ ['AttributesToGet', 'QueryFilter', 'ConditionalOperator', 'KeyConditions'])
-  var i, key
-  msg = validateConditions(data.QueryFilter)
+  msg = validations.validateConditions(data.QueryFilter)
   if (msg) return msg
   if (data.AttributesToGet) {
-    var attrs = Object.create(null)
-    for (i = 0; i < data.AttributesToGet.length; i++) {
-      if (attrs[data.AttributesToGet[i]])
-        return 'One or more parameter values were invalid: Duplicate value in attribute name: ' +
-          data.AttributesToGet[i]
-      attrs[data.AttributesToGet[i]] = true
-    }
+    msg = validations.findDuplicate(data.AttributesToGet)
+    if (msg) return 'One or more parameter values were invalid: Duplicate value in attribute name: ' + msg
   }
   for (key in data.ExclusiveStartKey) {
-    msg = validateAttributeValue(data.ExclusiveStartKey[key])
+    msg = validations.validateAttributeValue(data.ExclusiveStartKey[key])
     if (msg) return 'The provided starting key is invalid: ' + msg
@@ -134,7 +125,7 @@ }
-  msg = validateExpressions(data)
+  msg = validations.validateExpressions(data)
   if (msg) return msg
   if (data._keyCondition != null) {
-    data.KeyConditions = convertKeyCondition(data._keyCondition.expression)
+    data.KeyConditions = validations.convertKeyCondition(data._keyCondition.expression)
     if (typeof data.KeyConditions == 'string') {
@@ -145,3 +136,3 @@ return data.KeyConditions
-    msg = validateConditions(data.KeyConditions)
+    msg = validations.validateConditions(data.KeyConditions)
     if (msg) return msg
@@ -148,0 +139,0 @@
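
For context on the convertKeyCondition call above: the query validator converts a parsed KeyConditionExpression into the legacy KeyConditions shape so the same downstream validateConditions path can be reused, and a string return value is treated as an error message. A hypothetical before/after (the attribute names and values are made up; the shapes follow the public DynamoDB API, and the converter's internals are not shown in this diff):

// Hypothetical equivalence, for illustration only:
var expression = 'id = :id AND ts > :ts' // a parsed KeyConditionExpression
var keyConditions = {                    // the legacy KeyConditions form
  id: {ComparisonOperator: 'EQ', AttributeValueList: [{S: 'user-1'}]},
  ts: {ComparisonOperator: 'GT', AttributeValueList: [{N: '1500000000'}]},
}
// Per the call site above, a string result is an error message:
// if (typeof data.KeyConditions == 'string') return data.KeyConditions
console.log(expression, JSON.stringify(keyConditions, null, 2))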
@@ -1,5 +0,2 @@
-var validateAttributeValue = require('./index').validateAttributeValue,
-    validateExpressionParams = require('./index').validateExpressionParams,
-    validateExpressions = require('./index').validateExpressions,
-    validateConditions = require('./index').validateConditions
+var validations = require('./index')
@@ -89,3 +86,3 @@ exports.types = {
-  var msg = validateExpressionParams(data,
+  var msg = validations.validateExpressionParams(data,
     ['ProjectionExpression', 'FilterExpression'],
@@ -96,16 +93,11 @@ ['AttributesToGet', 'ScanFilter', 'ConditionalOperator'])
   if (data.AttributesToGet) {
-    var attrs = Object.create(null)
-    for (var i = 0; i < data.AttributesToGet.length; i++) {
-      if (attrs[data.AttributesToGet[i]])
-        return 'One or more parameter values were invalid: Duplicate value in attribute name: ' +
-          data.AttributesToGet[i]
-      attrs[data.AttributesToGet[i]] = true
-    }
+    msg = validations.findDuplicate(data.AttributesToGet)
+    if (msg) return 'One or more parameter values were invalid: Duplicate value in attribute name: ' + msg
   }
-  msg = validateConditions(data.ScanFilter)
+  msg = validations.validateConditions(data.ScanFilter)
   if (msg) return msg
   for (var key in data.ExclusiveStartKey) {
-    msg = validateAttributeValue(data.ExclusiveStartKey[key])
+    msg = validations.validateAttributeValue(data.ExclusiveStartKey[key])
     if (msg) return 'The provided starting key is invalid: ' + msg
@@ -128,4 +120,4 @@ }
-  msg = validateExpressions(data, ['ProjectionExpression', 'FilterExpression'])
+  msg = validations.validateExpressions(data, ['ProjectionExpression', 'FilterExpression'])
   if (msg) return msg
 }
@@ -1,5 +0,2 @@
-var validateAttributeValue = require('./index').validateAttributeValue,
-    validateAttributeConditions = require('./index').validateAttributeConditions,
-    validateExpressionParams = require('./index').validateExpressionParams,
-    validateExpressions = require('./index').validateExpressions
+var validations = require('./index')
@@ -81,3 +78,3 @@ exports.types = {
-  var msg = validateExpressionParams(data,
+  var msg = validations.validateExpressionParams(data,
     ['UpdateExpression', 'ConditionExpression'],
@@ -88,3 +85,3 @@ ['AttributeUpdates', 'Expected'])
   for (var key in data.Key) {
-    msg = validateAttributeValue(data.Key[key])
+    msg = validations.validateAttributeValue(data.Key[key])
     if (msg) return msg
@@ -95,3 +92,3 @@ }
   if (data.AttributeUpdates[key].Value != null) {
-    msg = validateAttributeValue(data.AttributeUpdates[key].Value)
+    msg = validations.validateAttributeValue(data.AttributeUpdates[key].Value)
     if (msg) return msg
@@ -116,7 +113,7 @@ }
-  msg = validateAttributeConditions(data)
+  msg = validations.validateAttributeConditions(data)
   if (msg) return msg
-  msg = validateExpressions(data)
+  msg = validations.validateExpressions(data)
   if (msg) return msg
 }