aligned-block-file
Comparing version 0.1.1 to 0.1.2
@@ -20,3 +20,3 @@ var fs = require('fs')
module.exports = function (file, block_size, cache) {
var cbs = [], br
var cbs = [], br, writing = 0
cache = cache || Cache(1000)
@@ -30,3 +30,2 @@
else {
var buf = new Buffer(block_size)
cbs[i] = [cb]
@@ -36,3 +35,3 @@ file.get(i, function (err, buf, bytes_read) {
cbs[i] = null
cache.set(i, buf)
if(!err) cache.set(i, buf)
while(cb.length) cb.shift()(err, buf, bytes_read)
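The cache handed to the constructor only needs get(i) and set(i, value); the change above simply stops caching a block when its read errored. A minimal in-memory cache along those lines (illustrative, mirroring the plain-object cache the tests build) could look like:

// hypothetical minimal cache: any object with get(i) / set(i, v) works
function MemoryCache () {
  var store = {}
  return {
    get: function (i) { return store[i] },
    set: function (i, v) { store[i] = v }
  }
}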
@@ -62,3 +61,14 @@ })
if(start < end) next(i+1)
else cb(null, bufs.length == 1 ? bufs[0] : Buffer.concat(bufs), bytes_read)
else {
var buffer = bufs.length == 1 ? bufs[0] : Buffer.concat(bufs)
if(!buffer.length)
return cb(new Error('read an empty buffer at:'+start + ' to ' + end + '\n'+
JSON.stringify({
start: start, end: end, i:i,
bytes_read: bytes_read,
bufs: bufs
}))
)
cb(null, buffer, bytes_read)
}
})
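With this guard, a zero-length result now surfaces as an error carrying the requested range instead of an empty buffer. A hedged sketch of a caller handling it (blocks, start and end are illustrative names, not taken from the diff):

// hypothetical caller handling the new empty-buffer error
blocks.read(start, end, function (err, buf, bytes_read) {
  if(err) return console.error('read failed:', err.message)
  console.log('got', bytes_read, 'bytes')
})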
@@ -69,2 +79,11 @@ })(i)
//start by reading the end of the last block.
//this must always be kept in memory.
// var offset = Obv()
// file.offset.once(function (size) {
// get(~~(size/block_size), function (err, last) {
// //
// })
// })
//
return br = {
@@ -86,2 +105,3 @@ read: read,
//if successful, copy into cache.
if(writing++) throw new Error('already appending to this file')
file.offset.once(function (_offset) {
@@ -92,29 +112,40 @@
var i = ~~(start/block_size)
while(b_start < buf.length) { //start < _offset+buf.length) {
var block_start = i*block_size
var b = cache.get(i)
if(null == b) {
b = new Buffer(block_size)
b.fill(0)
cache.set(i, b)
if(i*block_size < _offset) //usually true, unless file length is multiple of block_size
get(i, function (err) { //this will add the last block to the cache.
if(err) cb(explain(err, 'precache before append failed'))
else next()
})
else next()
function next () {
while(b_start < buf.length) { //start < _offset+buf.length) {
var block_start = i*block_size
var b = cache.get(i)
if(null == b) {
b = new Buffer(block_size)
b.fill(0)
cache.set(i, b)
}
//including if set in above if...
if(Buffer.isBuffer(b)) {
var len = Math.min(block_size - (start - block_start), block_size)
buf.copy(b, start - block_start, b_start, b_start + len)
start += len
b_start += len
}
else if(Array.isArray(cbs[i]))
throw new Error('should never happen: new block should be initialized, before a read ever happens')
else {
start += block_size
}
i++
}
//including if set in above if...
if(Buffer.isBuffer(b)) {
var len = Math.min(block_size - (start - block_start), block_size)
buf.copy(b, start - block_start, b_start, b_start + len)
start += len
b_start += len
}
else if(Array.isArray(cbs[i]))
throw new Error('should never happen: new block should be initialized, before a read ever happens')
else
start += block_size
i++
file.append(buf, function (err, offset) {
if(err) return cb(err)
writing = 0
cb(null, offset)
})
}
file.append(buf, function (err, offset) {
if(err) return cb(err)
cb(null, offset)
})
})
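Because writing is only reset inside the append callback, a second append issued before the first one completes now throws synchronously. A hedged sketch of the intended calling pattern (firstBuf and secondBuf are illustrative names):

// hypothetical usage: chain appends, do not issue them in parallel
blocks.append(firstBuf, function (err, offset) {
  if(err) throw err
  blocks.append(secondBuf, function (err, offset2) {
    if(err) throw err
    console.log('file now ends at', offset2)
  })
})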
@@ -125,1 +156,2 @@ }
file.js
@@ -24,7 +24,23 @@ var fs = require('fs')
get: function (i, cb) {
fd.once(function (_fd) {
offset.once(function (_offset) {
var max = ~~(_offset / block_size)
if(i > max)
return cb(new Error('aligned-block-file/file.get: requested block index was greater than max, got:'+i+', expected less than or equal to:'+max))
var buf = new Buffer(block_size)
buf.fill(0) //security
fs.read(_fd, buf, 0, block_size, i*block_size, function (err, bytes_read) {
cb(err, buf, bytes_read)
fs.read(fd.value, buf, 0, block_size, i*block_size, function (err, bytes_read) {
if(err) cb(err)
else if(
//if bytes_read is wrong
buf.length !== bytes_read &&
//unless this is the very last block and it is incomplete.
!((i*block_size + bytes_read) == offset.value)
)
cb(new Error(
'aligned-block-file/file.get: did not read whole block, expected length:'+
block_size+' but got:'+bytes_read
))
else
cb(null, buf, bytes_read)
})
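The rewritten get treats a short read as an error unless the block is the final, partially written one. The same acceptance rule restated in isolation (a sketch; file_offset stands in for offset.value):

// hypothetical restatement of the short-read check above
function isAcceptableRead (i, bytes_read, block_size, file_offset) {
  if(bytes_read === block_size) return true             //a full block
  return (i*block_size + bytes_read) === file_offset    //only the trailing, partial block may be short
}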
@@ -41,2 +57,4 @@ })
writing = 0
if(err) return cb(err)
if(written !== buf.length) return cb(new Error('wrote less bytes than expected:'+written+', but wanted:'+buf.length))
offset.set(_offset+written)
@@ -56,1 +74,6 @@ cb(null, _offset+written)
{
"name": "aligned-block-file",
"description": "",
"version": "0.1.1",
"version": "0.1.2",
"homepage": "https://github.com/flumedb/aligned-block-file",
@@ -6,0 +6,0 @@ "repository": {
@@ -8,82 +8,156 @@
var filename = '/tmp/test_block-reader_'+Date.now()
var blocks = Blocks(File(filename, 32, 'a+'), 32)
var store = {}
var cache = {
get: function (i) { return store[i] },
set: function (i, v) { store[i] = v }
}
var a = new Buffer(32)
a.fill('a')
module.exports = function (reduce) {
var b = new Buffer(32)
b.fill('b')
var filename = '/tmp/test_block-reader_'+Date.now()
var blocks = reduce(null)
var c = new Buffer(32)
c.fill('c')
var a = new Buffer(32)
a.fill('a')
tape('append one block', function (t) {
var b = new Buffer(32)
b.fill('b')
blocks.append(a, function (err, offset) {
if(err) throw err
t.equal(offset, 32)
t.end()
})
var c = new Buffer(32)
c.fill('c')
var d = new Buffer(32)
d.fill('d')
var e = new Buffer(24)
e.fill('e')
var f = new Buffer(64)
f.fill('f')
})
tape('append one block', function (t) {
blocks.append(a, function (err, offset) {
if(err) throw err
t.equal(offset, 32)
t.end()
})
tape('append another block', function (t) {
blocks.append(b, function (err, offset) {
if(err) throw err
t.equal(offset, 64)
t.end()
})
})
tape('append a half block', function (t) {
tape('append another block', function (t) {
blocks = reduce(blocks)
blocks.append(b, function (err, offset) {
if(err) throw err
t.equal(offset, 64)
t.end()
})
blocks.append(c.slice(0, 16), function (err, offset) {
if(err) throw err
t.equal(offset, 80)
t.end()
})
})
tape('append a half block', function (t) {
blocks = reduce(blocks)
blocks.append(c.slice(0, 16), function (err, offset) {
if(err) throw err
t.equal(offset, 80)
t.end()
})
tape('read last block', function (t) {
blocks.read(64, 96, function (err, _c) {
if(err) throw err
console.log(_c)
t.deepEqual(_c.slice(0, 16), c.slice(0, 16))
t.end()
})
})
tape('append another half block', function (t) {
blocks.append(c.slice(0, 16), function (err, offset) {
if(err) throw err
t.equal(offset, 96)
tape('read last block', function (t) {
blocks = reduce(blocks)
blocks.read(64, 96, function (err, _c) {
if(err) throw err
console.log(_c)
t.deepEqual(_c, c)
t.deepEqual(_c.slice(0, 16), c.slice(0, 16))
t.end()
})
})
tape('append another half block', function (t) {
blocks = reduce(blocks) // Blocks(File(filename, 32, 'a+'), 32)
blocks.append(c.slice(0, 16), function (err, offset) {
if(err) throw err
t.equal(offset, 96)
blocks.read(64, 96, function (err, _c) {
if(err) throw err
console.log(_c)
t.deepEqual(_c, c)
t.end()
})
})
})
})
tape('appending in parallel throws', function (t) {
blocks.append(a, function (err, offset) {
if(err) throw err
t.equal(offset, 128)
t.end()
tape('appending in parallel throws', function (t) {
blocks = reduce(blocks)
blocks.append(a, function (err, offset) {
if(err) throw err
t.equal(offset, 128)
t.end()
})
t.throws(function () {
blocks.append(b, function (err, offset) {
t.fail('should never be called')
})
})
})
t.throws(function () {
blocks.append(b, function (err, offset) {
t.fail('should never be called')
tape('read in parallel with append', function (t) {
store = {} //clear the cache
blocks.offset.once(function (o) {
blocks.append(c, function (err, _o) {
t.equal(160, _o)
t.end()
})
blocks.read(o, o+16, function (err, buf) {
if(err) throw err
t.deepEqual(buf, c.slice(0, 16))
})
})
})
})
tape('append half block, then overlapping block', function (t) {
blocks = reduce(blocks)
blocks.append(e, function (err, offset) {
if(err) throw err
t.equal(offset, 184)
blocks.read(144, 176, function (err, data) {
if(err) throw err
console.log(err, data)
store = {}
blocks.append(f, function (err, offset) {
blocks.read(176, 180, function (err, data) {
if(err) throw err
console.log(err, data)
if(err) throw err
t.equal(offset, 248)
console.log(store)
t.end()
})
})
})
})
})
}
if(!module.parent) {
var filename = '/tmp/test_block-reader_'+Date.now()
module.exports(function (b) {
return b ? b : Blocks(File(filename, 32, 'a+'), 32)
})
}
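The test suite is now exported as a function of a reduce callback, so the same assertions can run against a single live instance (as in the default runner above) or against an instance that is re-created between steps. A hedged sketch of such a second runner; the require paths and file name are assumptions, not taken from the diff:

// hypothetical runner: re-open the same file at every step instead of
// reusing the in-memory instance
var Blocks = require('aligned-block-file')
var File = require('aligned-block-file/file')

var filename = '/tmp/test_block-reader_reopen_'+Date.now()
require('./test')(function (previous) {
  // ignore the previous instance and re-open the file each time
  return Blocks(File(filename, 32, 'a+'), 32)
})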