dynalite - npm Package Compare versions

Comparing version 0.0.18 to 0.0.19

actions/deleteItem.js

@@ -1,3 +0,2 @@

-var db = require('../db'),
-    itemDb = db.itemDb
+var db = require('../db')

@@ -9,3 +8,3 @@ module.exports = function deleteItem(data, cb) {

-    var key = db.validateKey(data.Key, table)
+    var key = db.validateKey(data.Key, table), itemDb = db.getItemDb(data.TableName)
     if (key instanceof Error) return cb(key)

@@ -12,0 +11,0 @@

actions/deleteTable.js

@@ -27,11 +27,15 @@ var db = require('../db'),

-      setTimeout(function() {
-        // TODO: Delete items too
-        tableDb.del(key, function(err) {
-          // TODO: Need to check this
-          if (err) console.error(err)
-        })
-      }, db.deleteTableMs)
+      db.deleteItemDb(key, function(err) {
+        if (err) return cb(err)
-      cb(null, {TableDescription: table})
+        setTimeout(function() {
+          // TODO: Delete items too
+          tableDb.del(key, function(err) {
+            // TODO: Need to check this
+            if (err) console.error(err)
+          })
+        }, db.deleteTableMs)
+        cb(null, {TableDescription: table})
+      })
     })

@@ -38,0 +42,0 @@ })

actions/getItem.js

@@ -1,3 +0,2 @@

-var db = require('../db'),
-    itemDb = db.itemDb
+var db = require('../db')

@@ -9,3 +8,3 @@ module.exports = function getItem(data, cb) {

-    var key = db.validateKey(data.Key, table)
+    var key = db.validateKey(data.Key, table), itemDb = db.getItemDb(data.TableName)
     if (key instanceof Error) return cb(key)

@@ -29,6 +28,4 @@

-      if (data.ReturnConsumedCapacity == 'TOTAL') {
-        var units = data.ConsistentRead ? 1 : 0.5
-        returnObj.ConsumedCapacity = {CapacityUnits: units, TableName: data.TableName}
-      }
+      if (data.ReturnConsumedCapacity == 'TOTAL')
+        returnObj.ConsumedCapacity = {CapacityUnits: db.capacityUnits(item, data.ConsistentRead), TableName: data.TableName}

@@ -35,0 +32,0 @@ cb(null, returnObj)
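
With this change GetItem's ConsumedCapacity is no longer a flat 1 or 0.5: the units scale with the stored item's size via db.capacityUnits (added to db.js later in this diff). A minimal sketch of the effect, using an invented ~3 KB item and the require path the actions use:

    var db = require('../db')

    // Illustrative only: attribute name and size are made up
    var threeKbItem = {blob: {S: new Array(3001).join('x')}}   // ~3 KB string attribute

    db.capacityUnits(threeKbItem, true)    // 3   -- ConsistentRead: charged in full units
    db.capacityUnits(threeKbItem, false)   // 1.5 -- eventually consistent: half units
    db.capacityUnits(null, true)           // 1   -- a missing item still reports one unit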

actions/putItem.js

@@ -1,3 +0,2 @@

-var db = require('../db'),
-    itemDb = db.itemDb
+var db = require('../db')

@@ -9,3 +8,3 @@ module.exports = function putItem(data, cb) {

-    var key = db.validateItem(data.Item, table)
+    var key = db.validateItem(data.Item, table), itemDb = db.getItemDb(data.TableName)
     if (key instanceof Error) return cb(key)

@@ -29,2 +28,5 @@

+        if (data.ReturnConsumedCapacity == 'TOTAL')
+          returnObj.ConsumedCapacity = {CapacityUnits: db.capacityUnits(data.Item, true), TableName: data.TableName}
         itemDb.put(key, data.Item, function(err) {

@@ -31,0 +33,0 @@ if (err) return cb(err)

actions/scan.js

 var once = require('once'),
     lazy = require('lazy'),
     Big = require('big.js'),
-    db = require('../db'),
-    itemDb = db.itemDb
+    db = require('../db')

@@ -13,9 +12,16 @@ module.exports = function scan(data, cb) {

-    var opts, vals, scannedCount = 0
+    var opts = {}, vals, scannedCount = 0, itemDb = db.getItemDb(data.TableName)
+    if (data.ExclusiveStartKey)
+      opts = {start: data.ExclusiveStartKey + '\x00'}
+    if (data.TotalSegments > 1) {
+      if (data.Segment > 0)
+        opts.start = ('00' + Math.ceil(4096 * data.Segment / data.TotalSegments).toString(16)).slice(-3)
+      opts.end = ('00' + (Math.ceil(4096 * (data.Segment + 1) / data.TotalSegments) - 1).toString(16)).slice(-3) + '\xff\xff'
+    }
+    vals = data.Segment > 0 ? lazy.range(0) : db.lazy(itemDb.createValueStream(opts), cb)
-    // TODO: Fix this
-    //if (data.ExclusiveStartKey)
-      //opts = {start: data.ExclusiveStartKey + '\x00'}
-    vals = db.lazy(itemDb.createValueStream(opts), cb)
     if (data.Limit) vals = vals.take(data.Limit)

@@ -22,0 +28,0 @@
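
The TotalSegments branch above relies on every stored key now starting with a three-hex-digit prefix in the range 000..fff (4096 buckets, see hashPrefix in db.js below), so each segment can be served by a contiguous key-range scan. A minimal sketch of the boundary arithmetic, with an illustrative segment count:

    // Reproduces the prefix-range arithmetic from the hunk above (illustrative helper, not in the package)
    function segmentRange(segment, totalSegments) {
      var opts = {}
      if (segment > 0)
        opts.start = ('00' + Math.ceil(4096 * segment / totalSegments).toString(16)).slice(-3)
      opts.end = ('00' + (Math.ceil(4096 * (segment + 1) / totalSegments) - 1).toString(16)).slice(-3) + '\xff\xff'
      return opts
    }

    // With TotalSegments = 4 the 4096 prefixes split into four contiguous slices:
    //   segment 0: end '3ff\xff\xff' (no start)
    //   segment 1: start '400', end '7ff\xff\xff'
    //   segment 2: start '800', end 'bff\xff\xff'
    //   segment 3: start 'c00', end 'fff\xff\xff'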

actions/updateItem.js

 var Big = require('big.js'),
-    db = require('../db'),
-    itemDb = db.itemDb
+    db = require('../db')

@@ -10,3 +9,3 @@ module.exports = function updateItem(data, cb) {

-    var key = db.validateKey(data.Key, table)
+    var key = db.validateKey(data.Key, table), itemDb = db.getItemDb(data.TableName)
     if (key instanceof Error) return cb(key)

@@ -13,0 +12,0 @@

db.js

@@ -6,8 +6,10 @@ var Readable = require('stream').Readable,

     sublevel = require('level-sublevel'),
+    deleteStream = require('level-delete-stream'),
     Lock = require('lock'),
-    Big = require('big.js')
+    Big = require('big.js'),
+    murmur = require('murmurhash-js')
 var db = sublevel(levelup('./mydb', {db: function(location) { return new MemDown(location) }})),
     tableDb = db.sublevel('table', {valueEncoding: 'json'}),
-    itemDb = db.sublevel('item', {valueEncoding: 'json'})
+    itemDbs = []

@@ -18,3 +20,4 @@ exports.createTableMs = 500

 exports.tableDb = tableDb
-exports.itemDb = itemDb
+exports.getItemDb = getItemDb
+exports.deleteItemDb = deleteItemDb
 exports.getTable = getTable

@@ -25,6 +28,21 @@ exports.validateKey = validateKey

 exports.checkConditional = checkConditional
+exports.itemSize = itemSize
+exports.capacityUnits = capacityUnits
 tableDb.lock = new Lock()
-itemDb.lock = new Lock()
+function getItemDb(name) {
+  if (!itemDbs[name]) {
+    itemDbs[name] = db.sublevel('item-' + name, {valueEncoding: 'json'})
+    itemDbs[name].lock = new Lock()
+  }
+  return itemDbs[name]
+}
+function deleteItemDb(name, cb) {
+  var itemDb = itemDbs[name] || db.sublevel('item-' + name, {valueEncoding: 'json'})
+  delete itemDbs[name]
+  itemDb.createKeyStream().pipe(deleteStream(db, cb))
+}
 function getTable(name, checkStatus, cb) {
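
Instead of one shared 'item' sublevel, each table now gets its own sublevel, created lazily, cached in itemDbs and given its own lock; deleteItemDb drops the cache entry and wipes the sublevel by piping its keys through level-delete-stream. A hedged usage sketch (table name, key and item are invented):

    var db = require('./db')   // the actions require this as '../db'

    var itemDb = db.getItemDb('MyTable')          // lazily creates sublevel 'item-MyTable'
    itemDb.lock('some-key', function(release) {   // per-table lock, same pattern the actions use
      itemDb.put('some-key', {id: {S: 'abc'}}, release(function(err) {
        if (err) return console.error(err)

        db.deleteItemDb('MyTable', function(err) {  // removes every item stored for that table
          if (err) console.error(err)
        })
      }))
    })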

@@ -62,3 +80,3 @@ if (typeof checkStatus == 'function') cb = checkStatus

-  var keyStr = table.TableName, i, j, attr, type
+  var keyStr, i, j, attr, type, sizeError
   for (i = 0; i < table.KeySchema.length; i++) {

@@ -71,2 +89,5 @@ attr = table.KeySchema[i].AttributeName

       if (dataKey[attr][type] == null) return validationError()
+      sizeError = checkKeySize(dataKey[attr][type], type, !i)
+      if (sizeError) return sizeError
+      if (!keyStr) keyStr = hashPrefix(dataKey[attr][type])
       keyStr += '\xff' + toLexiStr(dataKey[attr][type], type)

@@ -80,3 +101,3 @@ break

 function validateItem(dataItem, table) {
-  var keyStr = table.TableName, i, j, attr, type
+  var keyStr, i, j, attr, type, sizeError
   for (i = 0; i < table.KeySchema.length; i++) {

@@ -94,2 +115,5 @@ attr = table.KeySchema[i].AttributeName

           ' actual: ' + Object.keys(dataItem[attr])[0])
+      sizeError = checkKeySize(dataItem[attr][type], type, !i)
+      if (sizeError) return sizeError
+      if (!keyStr) keyStr = hashPrefix(dataItem[attr][type])
       keyStr += '\xff' + toLexiStr(dataItem[attr][type], type)

@@ -102,2 +126,14 @@ break

+function checkKeySize(keyPiece, type, isHash) {
+  // Numbers are always fine
+  if (type == 'N') return
+  if (type == 'B') keyPiece = new Buffer(keyPiece, 'base64')
+  if (isHash && keyPiece.length > 2048)
+    return validationError('One or more parameter values were invalid: ' +
+      'Size of hashkey has exceeded the maximum size limit of 2048 bytes')
+  else if (!isHash && keyPiece.length > 1024)
+    return validationError('One or more parameter values were invalid: ' +
+      'Aggregated size of all range keys has exceeded the size limit of 1024 bytes')
+}
 // Creates lexigraphically sortable number strings
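
checkKeySize mirrors DynamoDB's key length limits: hash keys may not exceed 2048 bytes and range keys 1024 bytes, binary values are measured after base64 decoding, and numbers are never size-checked. A small illustrative sketch of the return values (key values invented):

    var longHashKey = new Array(2050).join('a')        // 2049 characters, over the 2048-byte limit

    checkKeySize(longHashKey, 'S', true)               // returns the hash-key validation error
    checkKeySize('short-range-key', 'S', false)        // undefined: well under 1024 bytes
    checkKeySize('12345678901234567890', 'N', true)    // undefined: numbers are always fine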

@@ -126,2 +162,7 @@ // 0 7c 009 = '07c009' = -99.1

+// TODO: Not sure what sort of hashing algorithm is used
+function hashPrefix(hashKey) {
+  return ('00' + (murmur(hashKey) % 4096).toString(16)).slice(-3)
+}
 function checkConditional(expected, existingItem) {
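
hashPrefix buckets every hash key into one of 4096 three-hex-character prefixes using murmurhash-js, and validateKey/validateItem above now start the stored key string with that prefix before appending the '\xff'-separated lexicographic encodings. This is what lets Scan's segment ranges (scan.js above) select a slice of the table by key prefix. A minimal sketch (key value invented):

    var murmur = require('murmurhash-js')

    // Same computation as hashPrefix above
    var prefix = ('00' + (murmur('user-123') % 4096).toString(16)).slice(-3)
    // prefix is some three-hex-character string in '000'..'fff';
    // all keys for this item sort under that bucket, so a segment whose
    // start/end range covers the bucket will stream it during a parallel Scan.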

@@ -162,2 +203,40 @@ if (!expected) return

+function itemSize(item) {
+  var size = 0, attr, type, val
+  for (attr in item) {
+    type = Object.keys(item[attr])[0]
+    val = item[attr][type]
+    size += attr.length
+    switch (type) {
+      case 'S':
+        size += val.length
+        break
+      case 'B':
+        size += new Buffer(val, 'base64').length
+        break
+      case 'N':
+        size += Math.ceil(Big(val).c.length / 2) + 1
+        break
+      case 'SS':
+        // TODO: Check this
+        size += val.reduce(function(sum, x) { return sum + x.length }, 0)
+        break
+      case 'BS':
+        // TODO: Check this
+        size += val.reduce(function(sum, x) { return sum + new Buffer(x, 'base64').length }, 0)
+        break
+      case 'NS':
+        // TODO: Check this
+        size += val.reduce(function(sum, x) { return sum + Math.ceil(Big(x).c.length / 2) + 1 }, 0)
+        break
+    }
+  }
+  return size
+}
+function capacityUnits(item, isDouble) {
+  var size = item ? Math.ceil(itemSize(item) / 1024) : 1
+  return size * (isDouble ? 1 : 0.5)
+}
 // TODO: Ensure that sets match
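
itemSize approximates DynamoDB's item accounting: each attribute contributes its name length plus its value size, where strings count their characters, binary values their decoded bytes, and numbers roughly one byte per two significant digits plus one. capacityUnits then rounds the total up to whole kilobytes and halves it for eventually consistent reads. A worked example with an invented item:

    // Illustrative only: the arithmetic mirrors itemSize/capacityUnits above
    var item = {
      id:   {S: 'user-123'},   // 2 (name) + 8 (chars)              = 10
      name: {S: 'Jane'},       // 4 (name) + 4 (chars)              = 8
      age:  {N: '38'}          // 3 (name) + ceil(2 digits / 2) + 1 = 5
    }
    // itemSize(item) = 10 + 8 + 5 = 23 bytes
    // capacityUnits(item, true)  = Math.ceil(23 / 1024) * 1   = 1
    // capacityUnits(item, false) = Math.ceil(23 / 1024) * 0.5 = 0.5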

@@ -164,0 +243,0 @@ function valueEquals(val1, val2) {

package.json

 {
   "name": "dynalite",
-  "version": "0.0.18",
+  "version": "0.0.19",
   "description": "A mock implementation of Amazon's DynamoDB built on LevelDB",

@@ -37,3 +37,5 @@ "main": "index.js",

"async": "~0.2.9",
"big.js": "~2.4.1"
"big.js": "~2.4.1",
"level-delete-stream": "0.0.1",
"murmurhash-js": "0.0.1"
},

@@ -40,0 +42,0 @@ "devDependencies": {

README.md

@@ -33,5 +33,5 @@ dynalite

 * Use efficient range scans for Query calls
-* Implement `ReturnConsumedCapacity`/`ConsumedCapacity`/`ReturnItemCollectionMetrics` on all relevant endpoints
+* Implement `ReturnConsumedCapacity`/`ConsumedCapacity`/`ReturnItemCollectionMetrics` on all relevant endpoints (nearly done!)
 * Check for any missing `ExclusiveStartKey`/`LastEvaluatedKey` functionality (most should be fine)
 * Implement any outstanding secondary index behaviour

validations/putItem.js

@@ -1,2 +0,3 @@

-var validateAttributeValue = require('./index').validateAttributeValue
+var db = require('../db'),
+    validateAttributeValue = require('./index').validateAttributeValue

@@ -101,3 +102,6 @@ exports.types = {

     return 'ReturnValues can only be ALL_OLD or NONE'
+  if (db.itemSize(data.Item) > 65536)
+    return 'Item size has exceeded the maximum allowed size'
 }
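
This wires the new db.itemSize helper into PutItem validation, rejecting items over 65536 bytes (the 64 KB item limit DynamoDB enforced at the time). A tiny sketch (attribute name and size invented):

    // An item just over the limit fails the custom validation above
    var bigItem = {data: {S: new Array(65534).join('x')}}   // 4 + 65533 = 65537 bytes
    // db.itemSize(bigItem) > 65536, so PutItem would be rejected with
    // 'Item size has exceeded the maximum allowed size'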