Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

block-file

Package Overview
Dependencies
Maintainers
1
Versions
8
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

block-file - npm Package Compare versions

Comparing version 1.0.3 to 1.1.0

645

lib/block_file.js

@@ -15,4 +15,3 @@ // File: block_file.js

var u = require('lodash') //require('underscore')
, Y = require('ya-promise')
, pfs = require('./y-fs')
, async = require('async')
, Stats = require('stats-api')

@@ -22,3 +21,2 @@

var utils = require('./utils')
, seqp = utils.seqp
, validateCRC = utils.validateCRC

@@ -38,5 +36,5 @@ , signCRC = utils.signCRC

bfStats.createStat("tt_store ns ravg", Stats.RunningAverage
, { stat: bfStats.get("tt_store ns"), nelts: 1000 })
, { stat: bfStats.get("tt_store ns") })
bfStats.createStat("tt_load ns ravg", Stats.RunningAverage
, { stat: bfStats.get("tt_load ns"), nelts: 1000 })
, { stat: bfStats.get("tt_load ns") })
bfStats.createHistogram("hog tt_store ns", "tt_store ns", Stats.semiLogNS)

@@ -88,3 +86,2 @@ bfStats.createHistogram("hog tt_load ns", "tt_load ns", Stats.semiLogNS)

//
BlockFile.Props = Props
BlockFile.BlockFile = BlockFile

@@ -100,10 +97,12 @@ BlockFile.Handle = Handle

*/
BlockFile.create = BlockFile_create
function BlockFile_create(filename, metaProps_) {
metaProps_ = metaProps_ || {}
BlockFile.create = function(filename, metaProps_, createcb){
if (typeof createcb == 'undefined' && typeof metaProps_ == 'function') {
// if (arguments.length == 2) {
createcb = metaProps_
metaProps_ = {}
}
assert(u.isPlainObject(metaProps_), "metaProps argument is not a plain object")
var metaProps = u.defaults( u.clone(metaProps_)
, Props.defaultProps.metaProps())
var metaProps = u.defaults(u.clone(metaProps_), Props.defaultProps.metaProps())

@@ -124,2 +123,3 @@ Props.assertValidMetaProps(metaProps)

// 'metaProps'

@@ -142,27 +142,49 @@ //

return pfs.open(filename, 'wx+')
.then(
function(fd_){
fd = fd_
return pfs.write( fd
, mdBuf /*buffer*/
, 0 /*buffer offset*/
, mdBuf.length /*number of bytes to write*/
, BlockFile.MD_OFFSET_PRIMARY) /*file position*/
})
.spread(
function(bytesWritten, buffer){
return pfs.write( fd
, mdBuf /*buffer*/
, 0 /*buffer offset*/
, mdBuf.length /*number of bytes to write*/
, BlockFile.MD_OFFSET_SECONDARY) /*file position*/
})
.spread(
function(bytesWritten, buffer){
return pfs.close(fd)
})
} //BockFile__create()
async.waterfall(
[
function(cb) {
//open in write & exclusive mode; ie errors out if file exists
fs.open(filename, 'wx+', mode, cb)
}
// callback from fs.open is cb(err, fd)
, function(fd_, cb) {
fd = fd_
// write header primary
fs.write( fd
, mdBuf /*buffer*/
, 0 /*buffer offset*/
, mdBuf.length /*number of bytes to read*/
, BlockFile.MD_OFFSET_PRIMARY /*file position*/
, cb )
}
// callback from fs.write is cb(err, written, buffer)
, function(bytesWritten, buffer, cb) {
// write header backup
fs.write( fd
, mdBuf /*buffer*/
, 0 /*buffer offset*/
, mdBuf.length /*number of bytes to read*/
, BlockFile.MD_OFFSET_SECONDARY /*file position*/
, cb )
}
// callback from fs.write is cb(err, written, buffer)
, function(written, buffer, cb) {
fs.close(fd, cb)
}
// call back from fs.close is cb(err)
, function(cb) {
BlockFile.open(filename, metaProps, cb)
}
],
// callback from fs.close is cb(err) or error in waterfall
createcb
)
} //BockFile.create()
/** Open a BlockFile

@@ -172,12 +194,17 @@ *

* @param {Props} [props] in case we end up creating the file.
* @param {function} opencb opencb(err, bf)
*/
BlockFile.open = BlockFile_open
function BlockFile_open(filename, metaProps) {
if ( u.isUndefined(metaProps) )
metaProps = Props.defaultProps.metaProps()
else
metaProps = u.defaults(u.clone(metaProps), Props.defaultProps.metaProps())
BlockFile.open = function(filename, metaProps, opencb){
if (typeof opencb == 'undefined' && typeof metaProps == 'function') {
// if (arguments.length == 2) {
opencb = metaProps
metaProps = undefined
//props = undefined
}
metaProps = u.defaults(u.clone(metaProps||{}), Props.defaultProps.metaProps())
var stat, fd, bf
, mdBuf = new Buffer(BlockFile.MD_BLOCKSIZE)
, passed

@@ -187,86 +214,111 @@ mdBuf.fill(0)

assert.ok(typeof filename === 'string', "filename not a function")
assert.ok(typeof opencb === 'function', "opencb not a function")
var mdOffsetCRC32 = BlockFile.MD_MAP['crc32 value']
, fileOffsetPri = BlockFile.MD_OFFSET_PRIMARY
, fileOffsetSec = BlockFile.MD_OFFSET_SECONDARY
async.waterfall(
[
// stat(filename, ...)
function(cb) {
fs.stat(filename, function(err, stat) {
if (err && err.code === 'ENOENT') {
BlockFile.create(filename, metaProps, function(err) {
if (err) {
cb(err)
return
}
fs.stat(filename, cb)
})
return
}
cb(null, stat)
})
}
, function(stat_, cb) {
stat = stat_
return pfs.stat(filename)
.then(
function(stat_){ stat = stat_ }
, function(err){
if (err.code === 'ENOENT') //fix it
return BlockFile.create(filename, metaProps).then(
function(){ return pfs.stat(filename).then(
function(stat_){ stat = stat_ }) }
)
return err
})
.then(
function(){
if (!stat.isFile())
throw new Error("file, "+filename+", is not at regular file.")
if (stat.size < BlockFile.MD_HDRSIZE)
throw new Error(format("file, %s, is not big enough to hold the metadata header blocks; %d < %d", filename, stat.size, BlockFile.MD_HDRSIZE))
return pfs.open(filename, 'r+')
})
.then(
function(fd_){
fd = fd_
return pfs.read( fd
, mdBuf /*buffer*/
, 0 /*buffer offset*/
, mdBuf.length /*number of bytes to read*/
, fileOffsetPri ) /*file position*/
})
.spread(
function(bytesRead, buffer){
var fix
if ( !validateCRC32(buffer, mdOffsetCRC32) ) {
fix = pfs.read( fd
, buffer /*buffer*/
, 0 /*buffer offset*/
, buffer.length /*number of bytes to read*/
, fileOffsetSec ) /*file position*/
.spread(
function(bytesRead, buffer){
if ( !validateCRC32(buffer, mdOffsetCRC32) ){
throw new Error("PRIMARY & SECONDARY file header blocks are invalid")
}
//SECONDARY good, need to fix PRIMARY
return pfs.write( fd
, mdBuf
, 0
, mdBuf.length
, BlockFile.MD_OFFSET_PRIMARY)
})
return fix
}
return [bytesRead, buffer]
})
.spread(
function(bytesRead, buffer){
var mpStrLen, mpStr, metaProps, props
if (!stat.isFile()) { // filname is not a regular file
cb(new Error("file, "+filename+", is not at regular file."))
return /* no fall thru */
}
mpStrLen = buffer.readUInt16BE(BlockFile.MD_MAP['metaProps'])
mpStr = buffer.toString( 'utf8'
, BlockFile.MD_MAP['metaProps']+2
, BlockFile.MD_MAP['metaProps']+2 + mpStrLen)
if (stat.size < BlockFile.MD_HDRSIZE) {
cb(new Error(format("file, %s, is not big enough to hold the metadata header blocks; %d < %d", filename, stat.size, BlockFile.MD_HDRSIZE)))
return
}
metaProps = JSON.parse(mpStr)
fs.open(filename, 'r+', cb)
}
, function(fd_, cb) {
fd = fd_
props = new Props(metaProps)
//read PRIMARY file metadata block
fs.read( fd
, mdBuf /*buffer*/
, 0 /*buffer offset*/
, mdBuf.length /*number of bytes to read*/
, BlockFile.MD_OFFSET_PRIMARY /*file position*/
, cb )
}
, function(bytesRead, buffer, cb) {
if ( !validateCRC32(buffer, BlockFile.MD_MAP['crc32 value']) ) {
passed = false
//read SECONDARY file metadata block
fs.read( fd
, buffer /*buffer*/
, 0 /*buffer offset*/
, buffer.length /*number of bytes to read*/
, BlockFile.MD_OFFSET_SECONDARY /*file position*/
, cb )
return
}
passed = true
cb(null, bytesRead, buffer)
}
, function(bytesRead, buffer, cb){
if (passed) {
cb(null, bytesRead, buffer);
return
}
if (!validateCRC32(self.buffer, BlockFile.MD_MAP['crc32 value'])) {
//PRIMARY & SECONDARY are invalid
cb(new Error("PRIMARY & SECONDARY file header blocks are invalid"))
return
}
//SECONDARY good, need to fix PRIMARY
fs.write( fd
, buffer
, 0
, buffer.length
, BlockFile.MD_OFFSETPRIMARY
, cb )
}
, function(bytesWritten, buffer, cb){
//read metaProps & construct props
var mpStrLen, mpStr, metaProps, props
// new BlockFile && initialize
bf = new BlockFile(filename, fd, mdBuf, props)
return bf.initialize(stat)
})
} //BlockFile__open()
mpStrLen = buffer.readUInt16BE(BlockFile.MD_MAP['metaProps'])
mpStr = buffer.toString( 'utf8'
, BlockFile.MD_MAP['metaProps']+2
, BlockFile.MD_MAP['metaProps']+2 + mpStrLen )
metaProps = JSON.parse(mpStr)
props = new Props(metaProps)
// new BlockFile && initialize
bf = new BlockFile(filename, fd, mdBuf, props)
bf.initialize(stat, cb)
}
],
/* BlockFile__initialized: called with (err) arguments */
opencb
)
} //BlockFile.open()
/** Initialize BlockFile object ex. read in segments
*
* @param {fs.Stats} stat
* @param {fs.Stats} stat the fstat of the file to initialize
* @param {function} initcb initcb(err, bf)
*/
BlockFile.prototype.initialize = BlockFile__initialize
function BlockFile__initialize(stat) {
BlockFile.prototype.initialize = function(stat, initcb){
assert.ok(stat instanceof fs.Stats)

@@ -281,3 +333,4 @@ var self = this

, fileSize, BlockFile.MD_HDRSIZE )
throw new Error(errStr)
initcb(new Error(errStr))
return
}

@@ -291,3 +344,4 @@

, calcNumSegs))
throw err
initcb(err)
return
}

@@ -301,31 +355,37 @@

err = new Error(format("Invalid 'number of segments' %d", fileNumSegs))
throw err
initcb(err)
return
}
assert.strictEqual(calcNumSegs, fileNumSegs, "calcNumSegs !== fileNumSegs")
//console.error("\ncalcNumSegs = %j", calcNumSegs)
//console.error("fileNumSegs = %j", fileNumSegs)
//for 0..segment number -1
// read segment
async.whilst(
/* test */
function(){ return segNum < fileNumSegs }
/* body */
, function(untilcb){
self._readSegment(segNum, untilcb)
segNum += 1
}
/* finished or err */
, function(err){
initcb(err, self)
}
)
} //.initialize()
var p = Y.resolved()
u.range(fileNumSegs).forEach(function(e,i){
p = p.then(function(){ return self._readSegment(i) })
})
return p.then(function(){ return self })
} //BlockFile__initialize()
/**
* Close out BlockFile resources. eg .fd
*
* @param {function} cb cb(err)
*/
BlockFile.prototype.close = BlockFile__close
function BlockFile__close() {
BlockFile.prototype.close = function(cb){
var self = this
return self.writeSegments()
.then(function(){ return self.writeHeader() })
.then(function(){ return pfs.close(self.fd) })
} //BlockFile__close()
async.series( [ function(scb) { self.writeSegments(scb) }
, function(scb) { self.writeHeader(scb) }
, function(scb) { fs.close(self.fd, scb) } ]
, function(err, res){ if (err) cb(err); else cb() } )
} //.close()

@@ -336,7 +396,10 @@

*
* @param {function} cb cb(err)
*/
BlockFile.prototype.writeHeader = BlockFile__writeHeader
function BlockFile__writeHeader() {
BlockFile.prototype.writeHeader = function(cb){
var self = this
//, mdBuf = new Buffer(this.props.blockSize())
//mdBuf.fill(0)
var numSeg = self.segments.length

@@ -351,15 +414,23 @@ this.mdBuf.writeDoubleBE(numSeg, BlockFile.MD_MAP['number of segments'])

return pfs.write( self.fd
, self.mdBuf
, 0
, self.mdBuf.length
, BlockFile.MD_OFFSET_PRIMARY )
.then(function(){
pfs.write( self.fd
, self.mdBuf
, 0
, self.mdBuf.length
, BlockFile.MD_OFFSET_SECONDARY )
})
} //BlockFile__writeHeader()
async.series(
[
function(scb){
fs.write( self.fd
, self.mdBuf /*buffer*/
, 0 /*buffer offset*/
, self.mdBuf.length /*number of bytes to read*/
, BlockFile.MD_OFFSET_PRIMARY /*file position*/
, scb )
}
, function(scb){
fs.write( self.fd
, self.mdBuf /*buffer*/
, 0 /*buffer offset*/
, self.mdBuf.length /*number of bytes to read*/
, BlockFile.MD_OFFSET_SECONDARY /*file position*/
, scb )
}
]
, function(err, res){ if (err) cb(err); else cb() })
} //.writeHeader()

@@ -371,4 +442,3 @@

*/
BlockFile.prototype.addSegment = BlockFile__addSegment
function BlockFile__addSegment() {
BlockFile.prototype.addSegment = function(){
var self = this

@@ -378,11 +448,13 @@ , nextSegNum = self.segments.length

//log.info("addSegment called nextSegNum="+nextSegNum)
buffer.fill(0xff) //completely empty
this.segments.push(new Segment(nextSegNum, buffer, this.fsmType, this.props))
this.segments.push( new Segment(nextSegNum, buffer, this.fsmType, this.props) )
return nextSegNum
} //BlockFile__addSegment()
} //.addSegment()
BlockFile.prototype._calcSegOff = function(segNum) {
BlockFile.prototype._calcSegOff = function(segNum){
return BlockFile.MD_HDRSIZE + ( segNum * this.props.maxSegSize() )

@@ -392,3 +464,3 @@ }

BlockFile.prototype._calcBlkOff = function(segNum, blkNum) {
BlockFile.prototype._calcBlkOff = function(segNum, blkNum){
return this._calcSegOff(segNum) +

@@ -403,49 +475,67 @@ this.props.segHdrSize() +

* @param {number} segNum
* @param {Function} cb Callback (err, segment, segNum)
*/
BlockFile.prototype._readSegment = BlockFile__readSegment
function BlockFile__readSegment(segNum) {
BlockFile.prototype._readSegment = function(segNum, cb){
var self = this
, seg, fsmOff, fsmBuf
fsmBuf = new Buffer(this.props.fsmSize())
function finish(err, buffer) {
if (err) {
cb(err)
return
}
var segOffset = self._calcSegOff(segNum)
, fileOffsetFsmPri = segOffset + self.props.fsmOffsetPrimary()
, fileOffsetFsmSec = segOffset + self.props.fsmOffsetSecondary()
self.segments[segNum] = new Segment(segNum, buffer, self.fsmType, self.props)
return pfs.read( self.fd
, fsmBuf /*buffer*/
, 0 /*buffer offset*/
, fsmBuf.length /*number of bytes to read*/
, fileOffsetFsmPri ) /*file position*/
.spread(
function(bytesRead, buffer){
var fix
if ( !validateCRC(buffer, self.props) ) {
fix = pfs.read( self.fd
, fsmBuf /*buffer*/
, 0 /*buffer offset*/
, fsmBuf.length /*number of bytes to read*/
, fileOffsetFsmSec ) /*file position*/
.spread(
function(bytesRead, buffer){
if ( !validateCRC(buffer, self.props) ) {
throw new Error(format("PRIMARY & SECONDARY FSM for Segment %d are invalid", segNum))
}
//SECONDARY good, need to fix PRIMARY
return pfs.write( self.fd
, buffer
, 0
, buffer.length
, fileOffsetFsmPri )
})
}
return [bytesRead, buffer]
})
.spread(function(bytesRead, buffer){
self.segments[segNum] = new Segment(segNum, buffer, self.fsmType, self.props)
return self.segments[segNum]
})
cb(null, self.segments[segNum])
} //finish()
} //BlockFile__readSegment()
fsmBuf = new Buffer(this.props.fsmSize())
async.waterfall(
[
//read PRIMARY FSM
function(wfcb){
var fsmOff = self._calcSegOff(segNum) + self.props.fsmOffsetPrimary()
fs.read( self.fd
, fsmBuf /*buffer*/
, 0 /*buffer offset*/
, fsmBuf.length /*number of bytes to read*/
, fsmOff /*file position*/
, wfcb )
}
//validate xor read BACKUP FSM
, function(bRead, buffer, wfcb) {
if ( !validateCRC(buffer, self.props) ) {
var fsmOff = self._calcSegOff(segNum) + self.props.fsmOffsetSecondary()
fs.read( self.fd
, fsmBuf /*buffer*/
, 0 /*buffer offset*/
, fsmBuf.length /*number of bytes to read*/
, fsmOff /*file position*/
, wfcb )
return
}
finish(null, buffer)
}
// validate BACKUP and write PRIMARY
, function(bRead, buffer, wfcb) {
if ( !validateCRC(buffer, self.props) ) {
wfcb( new Error(format("PRIMARY & BACKUP FSM for Segment %d inavalid", segNum)) )
return
}
var fsmOff = self._calcSegOff(segNum) + self.props.fsmOffsetPrimary()
fs.write( self.fd
, buffer /*buffer*/
, 0 /*buffer offset*/
, buffer.length /*number of bytes to read*/
, fsmOff /*file position*/
, wfcb )
}
],
//written
function(err, bWrit, buffer, wfcb) {
finish(err, buffer)
}
)
} //._readSegment()

@@ -457,5 +547,5 @@

* @param {number} segNum
* @param {Function} cb Callback (err)
*/
BlockFile.prototype._writeSegment = BlockFile___writeSegment
function BlockFile___writeSegment(segNum) {
BlockFile.prototype._writeSegment = function(segNum, cb){
var self = this

@@ -468,43 +558,50 @@ , seg = this.segments[segNum]

var segOffset = self._calcSegOff(segNum)
, fileOffsetFsmPri = segOffset + self.props.fsmOffsetPrimary()
, fileOffsetFsmSec = segOffset + self.props.fsmOffsetSecondary()
async.waterfall(
[ //write PRIMARY
function(wfcb){
var fsmOff = self._calcSegOff(segNum) + self.props.fsmOffsetPrimary()
fs.write( self.fd
, seg.buffer
, 0
, seg.buffer.length
, fsmOff
, wfcb)
}
//write BACKUP
, function(bWrit, buffer, wfcb){
var fsmOff = self._calcSegOff(segNum) + self.props.fsmOffsetSecondary()
fs.write( self.fd
, seg.buffer
, 0
, seg.buffer.length
, fsmOff
, wfcb)
}
]
, function(err, bWrit, buffer){
if (err) { cb(err); return }
cb(null)
})
} //._writeSegment()
return pfs.write( self.fd
, seg.buffer
, 0
, seg.buffer.length
, fileOffsetFsmPri )
.then( function(v){ /*ignore v*/
return pfs.write( self.fd
, seg.buffer
, 0
, seg.buffer.length
, fileOffsetFsmSec ) } )
.then( function(v){ return /*drop v*/ } )
} //BlockFile___writeSegment()
/**
* Write all dirty segments in reverse order.
*
* @param {function} [cb] cb(err)
*/
BlockFile.prototype.writeSegments = BlockFile__writeSegments
function BlockFile__writeSegments() {
BlockFile.prototype.writeSegments = function(cb){
var self = this
, p = Y.resolved()
cb = cb || function(){}
var segs = u.filter(self.segments, function(seg){ return seg.dirty })
.reverse()
if (segs.length > 0) {
segs.reverse().forEach(function(seg){
p = p.then(function(){ return self._writeSegment( seg.segNum ) })
})
}
async.mapSeries( segs
, function(seg, cb){ self._writeSegment(seg.segNum, cb) }
, function(err, res){ if (err) cb(err); else cb() } )
} //.writeSegments()
return p
} //BlockFile__writeSegments()
/**

@@ -517,4 +614,3 @@ * Release blocks, described by hdl, from BlockFile

*/
BlockFile.prototype.release = BlockFile__release
function BlockFile__release(hdl) {
BlockFile.prototype.release = function(hdl){

@@ -525,5 +621,3 @@ var seg = this.segment[hdl.segNum]

if (spanNum !== hdl.spanNum)
throw new InvalidHandleError()
} //BlockFile__release()
} //.release()

@@ -537,4 +631,3 @@

*/
BlockFile.prototype._reserve = BlockFile___reserve
function BlockFile___reserve(numBlks) {
BlockFile.prototype.reserve = function(numBlks){
var len, hdl, segNum

@@ -553,3 +646,3 @@

return hdl
} //BlockFile___reserve()
} //.reserve()

@@ -561,56 +654,68 @@

* @param {Handle} hdl Handle object describes where to write buffer
* @param {Function} cb Callback (err, buffer, handle)
*/
BlockFile.prototype.load = BlockFile__load
function BlockFile__load(hdl) {
var buffer = new Buffer(this.props.blockSize() *(1+hdl.spanNum))
BlockFile.prototype.load = function(hdl, cb){
var buffer = new Buffer(this.props.blockSize() * (1+hdl.spanNum))
, blkOff = this._calcBlkOff(hdl.segNum, hdl.blkNum)
, doneNS = bfStats.get("tt_load ns").start()
, readp, loadp
readp = pfs.read( this.fd
, buffer /*buffer*/
, 0 /*buffer position*/
, buffer.length /*number of bytes to read*/
, blkOff /*file position*/)
fs.read( this.fd
, buffer /*buffer*/
, 0 /*buffer position*/
, buffer.length /*number of bytes to read*/
, blkOff /*file position*/
, function(err, bRead, buf){
if (err) { cb(err); return }
doneNS()
cb(null, buf, hdl)
} )
} //.load()
readp.then(function(){ doneNS() }) //need to call doneNS w/o arguments
loadp = readp.spread(function(bytesRead, buffer){
return [buffer, hdl]
})
return loadp
} //BlockFile__load()
/**
* Store block buffer
* Store block buffer copy-on-write semantics
*
* @param {Buffer} buffer Buffer object that is written to hdl location
* @returns {promise}
* @param {Function} cb Callback (err, handle)
* @returns {Handle}
*/
BlockFile.prototype.store = function store(buffer, hdl){
var numBlks, blkOff, writep, storep
, doneNS = bfStats.get("tt_store ns").start()
BlockFile.prototype.store = function(buffer, cb){
var numBlks, len, segNum, hdl, blkOff
numBlks = Math.ceil( buffer.length / this.props.blockSize() )
hdl = hdl || this._reserve(numBlks)
hdl = this.reserve(numBlks)
blkOff = this._calcBlkOff(hdl.segNum, hdl.blkNum)
writep = pfs.write( this.fd
, buffer
, 0 /*buffer position*/
, buffer.length /*number of bytes*/
, blkOff /*file position*/ )
this._store(buffer, hdl, cb)
writep.then(function(){ doneNS() }) //need to call doneNS w/o arguments
return hdl
} //.store()
storep = writep.spread(function(bytesWritten, buf){
return hdl
})
return storep
}
/**
* Store block buffer to an exact location
*
* @param {Buffer} buffer
* @param {Handle} hdl
* @param {function} cb cb(err, hdl)
*/
BlockFile.prototype._store = function(buffer, hdl, cb){
var blkOff
, doneNS = bfStats.get("tt_store ns").start()
blkOff = this._calcBlkOff(hdl.segNum, hdl.blkNum)
fs.write( this.fd
, buffer
, 0 /*buffer position*/
, buffer.length /*number of bytes*/
, blkOff /*file position*/
, function(err, bytesWritten, buf){
if (err) { cb(err); return }
doneNS() //ns timing
cb(null, hdl)
}
)
} //._store()
//THE END

@@ -88,4 +88,3 @@

*/
NaiveFSM.equal = NaiveFSM_equal
function NaiveFSM_equal(a, b) {
NaiveFSM.equal = function(a, b){
//a.spans [0..15][0..N] => begBlkNum in order lowest to highest

@@ -100,3 +99,3 @@ //a.fsiBeg {begBlkNum: spanNum}

return true
} //NaiveFSM_equal()
} //NaiveFSM.equal()

@@ -109,6 +108,5 @@

*/
NaiveFSM.prototype.equal = NaiveFSM__equal
function NaiveFSM__equal(fsm) {
return NaiveFSM.equal(this, fsm)
} //NaiveFSM__equal()
NaiveFSM.prototype.equal = function(other){
  // Delegate to the static structural comparison.
  var self = this
  return NaiveFSM.equal(self, other)
} //.equal(other)

@@ -121,4 +119,3 @@

*/
NaiveFSM.prototype.alloc = NaiveFSM__alloc
function NaiveFSM__alloc(reqSpanNum) {
NaiveFSM.prototype.alloc = function(reqSpanNum){
if (reqSpanNum === undefined) reqSpanNum = this.props.minSpanNum()

@@ -151,3 +148,3 @@

return begBlkNum //and reqSpanNum is implicit
} //NaiveFSM__alloc()
} //.alloc()

@@ -162,4 +159,3 @@

*/
NaiveFSM.prototype.free = NaiveFSM__free
function NaiveFSM__free(begBlkNum, spanNum) {
NaiveFSM.prototype.free = function(begBlkNum, spanNum){
//FIXME: we need to make sure begBlkNum/spanNum blocks are not already

@@ -229,3 +225,3 @@ // in the free list. Or should this be done in Segment?

this._insert(begBlkNum, spanNum)
} //NaiveFSM__free()
} //.free()

@@ -250,7 +246,6 @@

} //NaiveFSM__insert()
} //.insert()
NaiveFSM.prototype._delete = NaiveFSM__delete
function NaiveFSM__delete(spanNum, idx) {
NaiveFSM.prototype._delete = function(spanNum, idx){
var begBlkNum = this.spans[spanNum].splice(idx, 1)[0]

@@ -260,5 +255,5 @@ delete this.fsiBeg[begBlkNum]

return begBlkNum
}
} //._delete()
//

@@ -45,4 +45,3 @@

*/
Handle.assertValidSegNum = Handle_assertValidSegNum
function Handle_assertValidSegNum(segNum, props){
Handle.assertValidSegNum = function(segNum, props){
props = props || Props.defaultProps

@@ -62,4 +61,3 @@ assert.ok(segNum % 1 === 0, format("segNum=%j not an Integer", segNum))

*/
Handle.assertValidBlockNum = Handle_assertValidBlockNum
function Handle_assertValidBlockNum(blkNum, props) {
Handle.assertValidBlockNum = function(blkNum, props){
props = props || Props.defaultProps

@@ -80,4 +78,3 @@ assert.ok(blkNum % 1 === 0, format("blkNum(%j) not an Integer", blkNum))

*/
Handle.assertValidSpanNum = Handle_assertValidSpanNum
function Handle_assertValidSpanNum(spanNum, props) {
Handle.assertValidSpanNum = function(spanNum, props){
props = props || Props.defaultProps

@@ -97,4 +94,3 @@ assert.ok(spanNum % 1 === 0, format("spanNum(%j) not an Integer", spanNum))

*/
Handle.isValidSegNum = Handle_isValidSegNum
function Handle_isValidSegNum(segNum, props){
Handle.isValidSegNum = function(segNum, props){
props = props || Props.defaultProps

@@ -108,4 +104,3 @@ return segNum % 1 === 0 && segNum >= props.minSegNum() && segNum <= props.maxSegNum()

*/
Handle.isValidBlockNum = Handle_isValidBlockNum
function Handle_isValidBlockNum(blkNum, props) {
Handle.isValidBlockNum = function(blkNum, props){
props = props || Props.defaultProps

@@ -119,4 +114,3 @@ return blkNum % 1 === 0 && blkNum >= props.minBlkNum() && blkNum <= props.maxBlkNum()

*/
Handle.isValidSpanNum = Handle_isValidSpanNum
function Handle_isValidSpanNum(spanNum, props) {
Handle.isValidSpanNum = function(spanNum, props){
props = props || Props.defaultProps

@@ -177,4 +171,3 @@ return spanNum % 1 === 0 && spanNum >= props.minSpanNum() && spanNum <= props.maxSpanNum()

*/
Handle.decode = Handle_decode
function Handle_decode(hdlv, props) {
Handle.decode = function(hdlv, props){
props = props || Props.defaultProps

@@ -181,0 +174,0 @@ assert(props.numHandleBits == 32, "only support encoding 32 bit handles")

@@ -324,2 +324,19 @@

/**
* maxHandleSize is the largest number of bytes a handle can address.
* ie base block + max num of span blocks in bytes.
*/
Props.prototype.maxHandleSize = function(){
  // One base block plus the maximum number of span blocks, in bytes.
  var blkSz = this.blockSize()
  return blkSz * ( 1 + this.maxSpanNum() )
}
/**
* minHandleSize is the smallest number of bytes a handle can address.
* ie base block + min num of span blocks in bytes.
*/
Props.prototype.minHandleSize = function(){
  // One base block plus the minimum number of span blocks, in bytes.
  var blkSz = this.blockSize()
  return blkSz * ( 1 + this.minSpanNum() )
}
Props.prototype.toString = function(){

@@ -326,0 +343,0 @@ return JSON.stringify( this.metaProps() )

@@ -60,4 +60,3 @@

*/
Segment.prototype.get = Segment__get
function Segment__get(blkNum) {
Segment.prototype.get = function(blkNum){
//return this.freeBlockMap[blkNum]

@@ -79,4 +78,3 @@ var fbmv = this.freeBlockMap[blkNum]

*/
Segment.prototype.set = Segment__set
function Segment__set(blkNum, v) {
Segment.prototype.set = function(blkNum, v){
var fixedOffset = this.props.checkSumOffset + this.props.checkSumBits

@@ -98,4 +96,3 @@

*/
Segment.prototype.reserve = Segment__reserve
function Segment__reserve(numBlks) {
Segment.prototype.reserve = function(numBlks){
var spanNum = numBlks-1

@@ -121,4 +118,3 @@ , blkNum

*/
Segment.prototype.release = Segment__release
function Segment__release(hdl) {
Segment.prototype.release = function(hdl){

@@ -135,4 +131,3 @@ for (var i = hdl.blkNum; i <= hdl.blkNum+hdl.spanNum; i += 1) {

Segment.equal = Segment_equal
function Segment_equal(a, b) {
Segment.equal = function(a, b){
if (!(a instanceof Segment)) return false

@@ -148,4 +143,3 @@ if (!(b instanceof Segment)) return false

Segment.prototype.equal = Segment__equal
function Segment__equal(seg) {
Segment.prototype.equal = function(seg){
return Segment.equal(this, seg)

@@ -152,0 +146,0 @@ }

{
"name" : "block-file"
, "version" : "1.0.3"
, "version" : "1.1.0"
, "description" : "A library to read/write blocks from a file"

@@ -23,3 +23,2 @@ , "keywords" : ["block", "buffer", "storage"]

, "lib/utils.js"
, "lib/y-fs.js"
, "test/lorem-ipsum.1k.txt"

@@ -39,3 +38,2 @@ , "test/lorem-ipsum.4k.txt"

, "async" : ">=0.2.6"
, "ya-promise" : ">=1.0.0"
, "lodash" : ">=1.0.1"

@@ -42,0 +40,0 @@ , "stats-api" : ">=1.1.0" }

# Block File Library
## Apology for API changes
I offer my apologies to anyone actually using this library between 1.0.x
and 1.1.x . I've come to the belief that I should not use the promise based
API. I aesthetically like promises, but promises come with the problem of
`process.nextTick` versus `setImmediate`. I even considered a hybrid approach
with a queue of tasks executing a fixed number of tasks each `process.nextTick`.
But that is for another day (and debugging headaches). So `block-file` with
promises via `setImmediate` turns out to be much slower than that of `block-file`
with async (just running `time npm test` is 33 seconds with promises and 23
seconds with async). Additionally the async API can be easily converted to
a promise API via `Y.promisify` aka `Y.nfbind` or `Y.denodify` (or similar
functions in other promise libraries).
## Future?
I have plans for a 2.0 version which will make this library more
"transaction-capable". What that means I haven't quite figured out. But
the idea is to write a log of changes to a BlockFile/Segments and only
commit them when directed to. Any failed commit (via crash) can be re-tried
to regain a valid state.
# Purpose

@@ -19,7 +41,8 @@

### `promise = Blockfile.open(filename, [props])`
### Blockfile.open(filename, [props], cb)`
Where `cb` has the signature `cb(err, bf)`, and `bf` is the block-file object.
```javascript
BlockFile.open(filename)
.then(function(bf){
BlockFile.open(filename, function(err, bf){
...

@@ -29,19 +52,24 @@ })

### `promise = bf.close()`
### `bf.close(cb)`
Where `cb` has the signature `cb(err)`.
```javascript
BlockFile.open(filename)
.then(function(bf){
return bf.close()
BlockFile.open(filename, function(err, bf){
if (err) throw err
return bf.close(function(err){
if (err) throw err
console.log("%s closed", filename)
})
})
.then(function(){
console.log("%s closed", filename)
})
```
### `promise = bf.store(buffer, [handle])`
### `bf.store(buffer, [handle], cb)`
Where `cb` has the signature `cb(err, handle)`. If no `handle` is provided
as an argument to `bf.store()` then a new `handle` is allocated. The callback
receives the `handle` of where the `buffer` was stored.
```javascript
bf.store(buffer)
.then(function(handle){
bf.store(buffer, function(err, handle){
...

@@ -52,4 +80,3 @@ })

```javascript
bf.store(buffer, handle)
.then(function(handle){
bf.store(buffer, handle, function(err, handle){
...

@@ -59,7 +86,6 @@ })

### `promise = bf.load(handle)
### `bf.load(handle, cb)`
```javascript
bf.load(handle)
.spread(function(buffer, handle){
bf.load(handle, function(err, buffer, handle){
...

@@ -69,6 +95,10 @@ })

### `handle = bf.reserve(numBlocks)`
### `handle = bf.reserve(numBlocks, cb)`
### `boolean = bf.release(handle)`
Where `cb` has the signature `cb(err)`.
### `boolean = bf.release(handle, cb)`
Where `cb` has the signature `cb(err)`.
`boolean` reflects whether the handle was reserved already or not.

@@ -94,2 +124,4 @@

I am thinking of not having a 64bit handle option. :) solved that problem...
#### blockSzBits

@@ -122,5 +154,4 @@ default: `12`

* 4 => 1, 2, 3, ..., or 16 blocks
* 5 (are you getting the pattern yet?)
* 5 => 1, 2, 3, ..., or 32 blocks (you get the picture)
#### checkSumBits

@@ -139,42 +170,45 @@ default: `16`

```javascript
var Y = require('ya-promise')
var async = require('async')
, fs = require('fs')
, BlockFile = require('block-file')
, hdls
, data_fn = "my-data.bf"
, hdls_fn = "my-data-hdls.json"
, str = "lorem ipsum ..."
BlockFile.open("my-data.bf")
.then(
function(bf){
var strLen = Buffer.byteLength(str)
, buf = new Buffer(strLen+2)
, promises = []
BlockFile.open(data_fn, function(err, bf){
if (err) throw err
var strLen = Buffer.byteLength(str)
, buf = new Buffer(strLen+2)
, promises = []
buf.writeUInt16BE(strLen, 0)
buf.write(str, 2, strLen)
promises[0] = bf.store(buf)
promises[1] = bf.store(buf)
return Y.all(promises)
.then(function(v){ hdls = v })
.then(function(){ return bf.close() })
})
.then(
function(){
var a = hdls.map(function(v){ return v.toString() })
console.log(a)
fs.writeFileSync("handles.json", JSON.stringify(a))
})
.done()
buf.writeUInt16BE(strLen, 0)
buf.write(str, 2, strLen)
async.eachSeries(
[ buf, buf ]
, function(b, next){ bf.store(b, next) }
, function(err, res){
if (err) throw err
hdls = res
bf.close(function(err){
if (err) throw err
var a = hdls.map(function(hdl){ return hdl.toString() })
console.log(a)
fs.writeFileSync(hdls_fn, JSON.stringify(a))
})
})
```
```javascript
var fs = require('fs')
, BlockFile = require('block-file')
, Handle = require('./lib/handle')
var async = require('async')
, fs = require('fs')
, BlockFile = require('..')
, Handle = BlockFile.Handle
, data_fn = "my-data.bf"
, hdls_fn = "my-data-hdls.json"
, hdls = []
var data = JSON.parse( fs.readFileSync("handles.json") )
data.forEach(function(hdlStr,i){
var hdlStrs = JSON.parse( fs.readFileSync(hdls_fn) )
hdlStrs.forEach(function(hdlStr,i){
var hdl = Handle.fromString( hdlStr )

@@ -184,24 +218,32 @@ hdls.push(hdl)

BlockFile.open("my-data.bf")
.then(function(bf){
var promises = []
hdls.forEach(function(hdl,i){
promises[i] = bf.load(hdl)
})
BlockFile.open(data_fn, function(err, bf){
if (err) throw err
return Y.all(promises)
.then(function(rets){
rets.forEach(function(ret, i){
var buf = ret[0]
, hdl = ret[1]
, len = buf.readUInt16BE(0)
, str = buf.toString('utf8', 2, len+2)
var data = []
async.eachSeries(
hdls
, function(hdl, next){
bf.load(hdl, function(err, buf, hdl_){
if (err) { next(err); return }
data.push([buf, hdl_])
next()
})
}
, function(err){
if (err) throw err
for (var i=0; i<data.length; i+=1) {
var buf = data[i][0]
, hdl = data[i][1]
, len = buf.readUInt16BE(0)
, str = buf.toString('utf8', 2, len+2)
console.log("content = %j", str)
})
})
console.log("\nhdl = %s", hdl)
console.log("content = %s", str)
}
}
)
})
.done()
```

@@ -21,3 +21,3 @@ /* global describe it */

var seg, oseg, segNum, hdl
, buf = new Buffer(props.fsmSize())
, buf = new Buffer(props.blockSize())

@@ -24,0 +24,0 @@ buf.fill(0xff)

/* global describe it */
var Handle = require('../lib/handle')
var props = require('../lib/props').defaultProps
, Handle = require('../lib/handle')
, BlockFile = require('../lib/block_file')
, utils = require('../lib/utils')
, fs = require('fs')
, u = require('lodash')
, async = require('async')
, Y = require('ya-promise')
, assert = require('assert')
, expect = require('chai').expect
, util = require('util')
, inspect = util.inspect
, format = util.format
//Y.nextTick = process.nextTick
var USE_ASYNC = /^(?:y|yes|t|true)$/i.test(process.env['USE_ASYNC'])
var Props = require('../lib/props')
, props = Props.defaultProps
, NUM_SPANNUM = props.numSpanNums()
, BLOCK_SIZE = props.blockSize()
, NUM_BLOCKNUM = props.numBlkNums()
var filename ='test.bf'
, fnStat
, err, fnStat
, lorem4k_fn = 'test/lorem-ipsum.4k.txt'

@@ -32,2 +27,3 @@ , lorem4kStr

, lorem64kBuf
, sz
, outputFN = "stats.txt"

@@ -47,13 +43,11 @@

lorem4kSiz = Buffer.byteLength( lorem4kStr, 'utf8' )
lorem4kBuf = new Buffer( 4 + lorem4kSiz )
lorem4kBuf.writeUInt32BE( lorem4kSiz, 0 )
lorem4kBuf.write( lorem4kStr, 4, lorem4kSiz, 'utf8' )
assert.ok(lorem4kBuf.length < 4*1024) //lorem4kStr.length < 4*1024-4
lorem4kBuf = new Buffer( 2 + lorem4kSiz )
lorem4kBuf.writeUInt16BE( lorem4kSiz, 0 )
lorem4kBuf.write( lorem4kStr, 2, lorem4kSiz, 'utf8' )
lorem64kStr = fs.readFileSync(lorem64k_fn, 'utf8')
lorem64kSiz = Buffer.byteLength( lorem64kStr, 'utf8' )
lorem64kBuf = new Buffer( 4 + lorem64kSiz )
lorem64kBuf.writeUInt32BE( lorem64kSiz, 0 )
lorem64kBuf.write( lorem64kStr, 4, lorem64kSiz, 'utf8' )
assert.ok(lorem64kBuf.length < 64*1024) //lorem64kStr.length < 64*1024-4
lorem64kBuf = new Buffer( 2 + lorem64kSiz )
lorem64kBuf.writeUInt16BE( lorem64kSiz, 0 )
lorem64kBuf.write( lorem64kStr, 2, lorem64kSiz, 'utf8' )

@@ -69,15 +63,13 @@

it("should create a file "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){ bf = bf_; done() }, done )
//BlockFile.open(filename, function(err, bf_){
// bf = bf_
// if (err) {
// done(err)
// return
// }
// expect(bf).to.be.an.instanceof(BlockFile)
// //expect(bf instanceof BlockFile).to.be.true
// done()
//})
//BlockFile.create(filename, function(err, bf_){
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
//expect(bf instanceof BlockFile).to.be.true
done()
})
})

@@ -87,4 +79,3 @@

it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -99,8 +90,12 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
//expect(bf instanceof BlockFile).to.be.true
done()
}, done )
})
})

@@ -119,10 +114,11 @@

var storep = bf.store(lorem4kBuf)
storep.then(
function(hdl) {
//console.log("blks[%d].hdl = %s", i, hdl)
blks[i].hdl = hdl;
done()
bf.store(lorem4kBuf, function(err, hdl) {
if (err) {
done(err)
return
}
, function(err){ done(err) })
blks[i].hdl = hdl;
done()
})
})

@@ -135,4 +131,3 @@

it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -147,8 +142,11 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
done()
}, done )
})
})

@@ -159,22 +157,19 @@

var i = lastIdx
, loadp = bf.load(blks[i].hdl)
loadp.spread(
function(buf, hdl){
var siz, str
bf.load(blks[i].hdl, function(err, buf, hdl){
if (err) { done(err); return; }
var siz, str
siz = buf.readUInt32BE(0)
expect(siz).to.equal(blks[i].siz)
siz = buf.readUInt16BE(0)
expect(siz).to.equal(blks[i].siz)
str = buf.toString('utf8', 4, 4+siz)
expect(str).to.equal(blks[i].str)
str = buf.toString('utf8', 2, 2+siz)
expect(str).to.equal(blks[i].str)
done()
}
, function(err){ done(err) })
done()
})
})
it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -189,14 +184,18 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
//expect(bf instanceof BlockFile).to.be.true
done()
}, done )
})
})
it("Write a 4k buffer to file 32751 (NUM_BLOCKNUM-1) times"
it("Write a 4k buffer to file 32751 (NUM_BLKNUM-1) times"
, function(done){
this.timeout(10*1000)
this.timeout(5000)

@@ -206,51 +205,32 @@ lastIdx = nextIdx

if (USE_ASYNC) {
var i = lastIdx
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem4kStr
blks[i].siz = lorem4kSiz
var i = lastIdx
bf.store(lorem4kBuf).then(
function(hdl) {
//console.log("blks[%d].hdl = %s", i, hdl)
blks[i].hdl = hdl;
i += 1
loop()
}
, function(err) { loop(err) })
}
/*results*/
, function(err){ done(err) } )
}
else {
var d = Y.defer()
, p = d.promise
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem4kStr
blks[i].siz = lorem4kSiz
for (var j = lastIdx; j < nextIdx; j++)
p = p.then(
function(i){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem4kStr
blks[i].siz = lorem4kSiz
bf.store(lorem4kBuf, function(err, hdl) {
if (err) {
loop(err)
return
}
return bf.store(lorem4kBuf).then(
function(hdl) {
//console.log("blks[%d].hdl = %s", i, hdl)
blks[i].hdl = hdl;
return i + 1
})
})
blks[i].hdl = hdl;
p.then( function(j){ console.log("done: j = %j", j); done() }
, function(err){ done(err) } )
i += 1
loop()
})
}
/*results*/
, function(err){
done(err)
}
)
})
d.resolve(lastIdx)
}
})
it("Should STILL have only ONE segment", function(){

@@ -261,4 +241,3 @@ expect(bf.segments.length).to.equal(1)

it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -273,8 +252,11 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
done()
}, done )
})
})

@@ -285,69 +267,36 @@

, function(done){
this.timeout(10*1000)
this.timeout(10000)
var start = nextIdx - NUM_BLOCKNUM
var i = nextIdx - NUM_BLOCKNUM
, end = i + NUM_BLOCKNUM
if (USE_ASYNC) {
var i = start
async.whilst(
/*test*/
function() { return i < end }
/*body*/
, function(loop){
var loadp = bf.load(blks[i].hdl)
async.whilst(
/*test*/
function() { return i < end }
/*body*/
, function(loop){
bf.load(blks[i].hdl, function(err, buf, hdl){
if (err) { done(err); return; }
var siz, str
loadp.spread(
function(buf, hdl){
var siz, str
siz = buf.readUInt16BE(0)
expect(siz).to.equal(blks[0].siz)
siz = buf.readUInt32BE(0)
expect(siz).to.equal(blks[0].siz)
str = buf.toString('utf8', 2, 2+siz)
str = buf.toString('utf8', 4, 4+siz)
expect(str).to.equal(blks[i].str)
expect(str).to.equal(blks[i].str)
i += 1
loop()
})
}
/*results*/
, function(err){
done(err)
}
)
})
i += 1
loop()
}
, loop)
}
/*results*/
, function(err){ done(err) })
}
else {
var d = Y.defer()
, p = d.promise
for (var j = start; j < end; j++)
p = p.then(
function(i){
var loadp = bf.load(blks[i].hdl)
return loadp.spread(
function(buf, hdl){
var siz, str
siz = buf.readUInt32BE(0)
expect(siz).to.equal(blks[0].siz)
str = buf.toString('utf8', 4, 4+siz)
expect(str).to.equal(blks[i].str)
return i + 1
})
})
p.then( function(j){ console.log("done: j = %j", j); done() }
, function(err){ done(err) } )
d.resolve(start)
}
}) //it
it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -362,8 +311,11 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
done()
}, done )
})
})

@@ -378,3 +330,2 @@

var i = lastIdx
, storep = bf.store(lorem4kBuf)

@@ -385,8 +336,11 @@ blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}

storep.then(
function(hdl) {
blks[i].hdl = hdl
done()
bf.store(lorem4kBuf, function(err, hdl) {
if (err) {
done(err)
return
}
, function(err){ done(err) })
blks[i].hdl = hdl;
done()
})
})

@@ -399,4 +353,3 @@

it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -411,8 +364,12 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
//expect(bf instanceof BlockFile).to.be.true
done()
}, done )
})
})

@@ -423,3 +380,3 @@

, function(done){
this.timeout(10*1000)
this.timeout(5000)

@@ -429,51 +386,32 @@ lastIdx = nextIdx

if (USE_ASYNC) {
var i = lastIdx
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
//blks[i] = { str: lorem4kStr, siz: lorem4kSiz, hdl: undefined }
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem4kStr
blks[i].siz = lorem4kSiz
var i = lastIdx
var storep = bf.store(lorem4kBuf)
storep.then(
function(hdl) {
blks[i].hdl = hdl;
i += 1
loop()
}
, function(err){ loop(err) })
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {
/* str: lorem, siz: num, hdl: Handle */
}
/*results*/
, function(err){ done(err) })
}
else {
var d = Y.defer()
, p = d.promise
blks[i].str = lorem4kStr
blks[i].siz = lorem4kSiz
for (var j = lastIdx; j < nextIdx; j++)
p = p.then(
function(i){
//blks[i] = {str: lorem4kStr, siz: lorem4kSiz, hdl: undefined}
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem4kStr
blks[i].siz = lorem4kSiz
bf.store(lorem4kBuf, function(err, hdl) {
if (err) {
loop(err)
return
}
return bf.store(lorem4kBuf).then(
function(hdl) {
blks[i].hdl = hdl;
return i + 1
})
})
blks[i].hdl = hdl;
p.then( function(j){ console.log("done: j = %j", j); done() }
, function(err){ done(err) } )
d.resolve(lastIdx)
}
i += 1
loop()
})
}
/*results*/
, function(err){
done(err)
}
)
})

@@ -486,4 +424,3 @@

it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -498,8 +435,12 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
//expect(bf instanceof BlockFile).to.be.true
done()
}, done )
})
})

@@ -519,10 +460,11 @@

var storep = bf.store(lorem64kBuf)
bf.store(lorem64kBuf, function(err, hdl) {
if (err) {
done(err)
return
}
storep.then(
function(hdl) {
blks[i].hdl = hdl;
done()
}
, function(err){ done(err) })
blks[i].hdl = hdl;
done()
})
})

@@ -535,4 +477,3 @@

it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -547,64 +488,52 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
//expect(bf instanceof BlockFile).to.be.true
done()
}, done )
})
})
it("write a 64k buffer to file 2046 (NUM_BLOCKNUM/NUM_SPANNUM - 1) times"
it("write a 64k buffer to file 2046 (NUM_BLOCKNUM/(MAX_SPANNUM+1)-1) times"
, function(done){
this.timeout(2*1000)
this.timeout(5000)
lastIdx = nextIdx
nextIdx = lastIdx + (NUM_BLOCKNUM/NUM_SPANNUM - 1)
nextIdx = lastIdx + (NUM_BLOCKNUM/16 - 1)
if (USE_ASYNC) {
var i = lastIdx
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem64kStr
blks[i].siz = lorem64kSiz
var i = lastIdx
var storep = bf.store(lorem64kBuf)
storep.then(
function(hdl){
blks[i].hdl = hdl
i += 1
loop()
}
, function(err){ loop(err) })
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {
/* str: lorem, siz: num, hdl: Handle */
}
/*results*/
, function(err){ done(err) })
}
else {
var d = Y.defer()
, p = d.promise
blks[i].str = lorem64kStr
blks[i].siz = lorem64kSiz
for (var j = lastIdx; j < nextIdx; j++)
p = p.then(
function(i){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem64kStr
blks[i].siz = lorem64kSiz
bf.store(lorem64kBuf, function(err, hdl) {
if (err) {
loop(err)
return
}
return bf.store(lorem64kBuf).then(
function(hdl){
blks[i].hdl = hdl
return i + 1
})
})
blks[i].hdl = hdl;
p.then( function(j){ console.log("done: j = %j", j); done() }
, function(err){ done(err) } )
d.resolve(lastIdx)
}
i += 1
loop()
})
}
/*results*/
, function(err){
done(err)
}
)
})

@@ -617,4 +546,3 @@

it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -628,8 +556,12 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
//expect(bf instanceof BlockFile).to.be.true
done()
}, done )
})
})

@@ -648,9 +580,11 @@

var storep = bf.store(lorem4kBuf)
storep.then(
function(hdl) {
blks[i].hdl = hdl;
done()
bf.store(lorem4kBuf, function(err, hdl) {
if (err) {
done(err)
return
}
, function(err){ done(err) })
blks[i].hdl = hdl;
done()
})
})

@@ -663,4 +597,3 @@

it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -675,8 +608,11 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
expect(bf_).to.be.an.instanceof(BlockFile)
BlockFile.open(filename, function(err, bf_){
bf = bf_
if (err) {
done(err)
return
}
expect(bf).to.be.an.instanceof(BlockFile)
done()
}, done )
})
})

@@ -687,68 +623,38 @@

, function(done){
this.timeout(15*1000)
this.timeout(10000)
//utils.err("lastIdx=%j; nextIdx=%j; blks.length=%j"
// ,lastIdx , nextIdx, blks.length)
for (var k=0; k<blks.length; k+=1) {
assert(typeof blks[k] != 'undefined', format("blks[%d] is undefined", k))
for (var j=0; j<blks.length; j+=1) {
assert(!u.isUndefined(blks[j]), format("blks[%d] is undefined", j))
}
if (USE_ASYNC) {
var i = 0
async.whilst(
/*test*/
function() { return i < blks.length }
/*body*/
, function(loop){
var loadp = bf.load(blks[i].hdl)
loadp.spread(
function(buf, hdl){
var siz, str
var i = 0
siz = buf.readUInt32BE(0)
expect(siz).to.equal(blks[i].siz)
async.whilst(
/*test*/
function() { return i < blks.length }
/*body*/
, function(loop){
bf.load(blks[i].hdl, function(err, buf, hdl){
if (err) { done(err); return; }
var siz, str
str = buf.toString('utf8', 4, 4+siz)
expect(str).to.equal(blks[i].str)
siz = buf.readUInt16BE(0)
expect(siz).to.equal(blks[i].siz)
i += 1
loop()
}
, loop)
}
/*results*/
, function(err){ done(err) })
}
else {
var d = Y.defer()
, p = d.promise
str = buf.toString('utf8', 2, 2+siz)
expect(str).to.equal(blks[i].str)
for (var j = 0; j < blks.length; j++)
p = p.then(
function(i){
return bf.load(blks[i].hdl).spread(
function(buf, hdl){
var siz, str
i += 1
loop()
})
}
/*results*/
, function(err){
done(err)
}
)
})
siz = buf.readUInt32BE(0)
expect(siz).to.equal(blks[i].siz)
str = buf.toString('utf8', 4, 4+siz)
expect(str).to.equal(blks[i].str)
return i + 1
})
})
p.then( function(j){ console.log("done: j = %j", j); done() }
, function(err){ done(err) } )
d.resolve(0)
}
}) //it
it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -761,6 +667,12 @@

fs.writeFile(outputFN, BlockFile.STATS.toString({values:"both"})+"\n"
, function(err){ done(err) })
, function(){
if (err) { done(err); return }
done()
})
})
})
//describe("BlockFile.open()", function(){
// it("", function(){})
//})
})

@@ -9,3 +9,2 @@ /* global describe it */

, async = require('async')
, Y = require('ya-promise')
, assert = require('assert')

@@ -20,5 +19,2 @@ , expect = require('chai').expect

//Y.nextTick = process.nextTick
var USE_ASYNC = /^(?:y|yes|t|true)$/i.test(process.env['USE_ASYNC'])
var filename ='test-alt.bf'

@@ -116,4 +112,3 @@ , fnStat

it("BlockFile.create(), "+filename, function(done){
BlockFile.create(filename, metaProps)
.then(done, done)
BlockFile.create(filename, metaProps, done)
})

@@ -126,8 +121,11 @@

it("BlockFile.open(), "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_) { bf = bf_; done() }, done)
BlockFile.open(filename, function(err, bf_){
if (err) { done(err); return }
bf = bf_
done()
})
})
it("bf.close()", function(done){
bf.close().then(done, done)
bf.close(done)
})

@@ -139,4 +137,7 @@ })

it("BlockFile.open(), "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_) { bf = bf_; done() }, done)
BlockFile.open(filename, function(err, bf_){
if (err) { done(err); return }
bf = bf_
done()
})
})

@@ -151,55 +152,26 @@

if (USE_ASYNC) {
utils.err("USING ASYNC")
var i = lastIdx
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem1kStr
blks[i].siz = lorem1kSiz
var i = lastIdx
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem1kStr
blks[i].siz = lorem1kSiz
bf.store(lorem1kBuf).then(
function(hdl) {
//utils.err("blks[%d] = %s", i, hdl)
lastHdl = hdl
blks[i].hdl = hdl;
i += 1
loop()
}
, function(err) { loop(err) })
}
/*results*/
, function(err){ done(err) } )
}
else {
var d = Y.defer()
, p = d.promise
bf.store(lorem1kBuf, function(err, hdl){
if (err) { loop(err); return }
//utils.err("blks[%d] = %s", i, hdl)
lastHdl = hdl
blks[i].hdl = hdl;
i += 1
loop()
})
}
/*results*/
, function(err){ done(err) } )
for (var j = lastIdx; j < nextIdx; j++)
p = p.then(
function(i){
//utils.err("i = %j", i)
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem1kStr
blks[i].siz = lorem1kSiz
}) //it("Write bf.props.numBlkNums() lorem1k buffers",
return bf.store(lorem1kBuf).then(
function(hdl) {
//utils.err("blks[%d] = %s", j, hdl)
lastHdl = hdl
blks[i].hdl = hdl;
return i + 1
})
})
p.then( function(j){ console.log("done: j = %j", j); done() }
, function(err){ done(err) } )
d.resolve(lastIdx)
}
})
it("should only have one segemnt", function(){

@@ -211,6 +183,6 @@ //utils.err("lastHdl = %s", lastHdl)

it("bf.close()", function(done){
bf.close().then(done, done)
bf.close(done)
})
})
}) //describe("Write 1 segment worth of 1k buffers",

@@ -220,4 +192,7 @@ describe("Write 1 segment worth of 64k buffers", function(){

it("BlockFile.open(), "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_) { bf = bf_; done() }, done)
BlockFile.open(filename, function(err, bf_){
if (err) { done(err); return }
bf = bf_
done()
})
})

@@ -231,54 +206,25 @@

//utils.err("ceil(numBlkNums/64-1 = %j)", ceil(bf.props.numBlkNums()/64)-1)
var i = lastIdx
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem64kStr
blks[i].siz = lorem64kSiz
if (USE_ASYNC) {
utils.err("USING ASYNC")
var i = lastIdx
async.whilst(
/*test*/
function() { return i < nextIdx }
/*body*/
, function(loop){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem64kStr
blks[i].siz = lorem64kSiz
bf.store(lorem64kBuf, function(err, hdl) {
if (err) { loop(err); return }
//utils.err("blks[%d] = %s", i, hdl)
blks[i].hdl = hdl;
i += 1
loop()
})
}
/*results*/
, function(err){ done(err) } )
bf.store(lorem64kBuf).then(
function(hdl) {
//utils.err("blks[%d] = %s", i, hdl)
blks[i].hdl = hdl;
i += 1
loop()
}
, function(err) { loop(err) })
}
/*results*/
, function(err){ done(err) } )
}
else {
var d = Y.defer()
, p = d.promise
}) //it("Write bf.props.numBlkNums()/64 lorem1k buffers",
for (var j = lastIdx; j < nextIdx; j++)
p = p.then(
function(i){
blks[i] = {/*str: lorem, siz: num, hdl: Handle*/}
blks[i].str = lorem64kStr
blks[i].siz = lorem64kSiz
return bf.store(lorem64kBuf).then(
function(hdl) {
//utils.err("blks[%d] = %s", i, hdl)
blks[i].hdl = hdl;
return i + 1
})
})
p.then( function(j){ console.log("done: j = %j", j); done() }
, function(err){ done(err) } )
d.resolve(lastIdx)
}
})
it("should have two segemnts", function(){

@@ -289,11 +235,15 @@ expect(bf.segments.length).to.equal(2)

it("bf.close()", function(done){
bf.close().then(done, done)
bf.close(done)
})
})
}) //describe("Write 1 segment worth of 64k buffers",
describe("Write one more 64k block to roll #segments over to 3", function(){
var bf
it("BlockFile.open(), "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_) { bf = bf_; done() }, done)
BlockFile.open(filename, function(err, bf_){
if (err) { done(err); return }
bf = bf_
done()
})
})

@@ -310,12 +260,12 @@

bf.store(lorem64kBuf).then(
function(hdl){
//utils.err("blks[%d] = %s", i, hdl)
blks[i].hdl = hdl;
done()
})
bf.store(lorem64kBuf, function(err, hdl){
if (err) { done(err); return }
//utils.err("blks[%d] = %s", i, hdl)
blks[i].hdl = hdl;
done()
})
})
it("bf.close()", function(done){
bf.close().then(done, done)
bf.close(done)
})

@@ -332,79 +282,48 @@

it("should open "+filename, function(done){
BlockFile.open(filename)
.then(function(bf_){
BlockFile.open(filename, function(err, bf_){
if (err) { done(err); return }
expect(bf_).to.be.an.instanceof(BlockFile)
bf = bf_
done()
}, done )
})
})
it("Read all blks.length blks[i].hdl"
, function(done){
this.timeout(10*1000)
it("Read all blks.length blks[i].hdl", function(done){
this.timeout(10*1000)
for (var j=0; j<blks.length; j+=1) {
assert(typeof blks[j] != 'undefined', format("blks[%d] is undefined", j))
}
for (var j=0; j<blks.length; j+=1) {
assert(typeof blks[j] != 'undefined', format("blks[%d] is undefined", j))
}
//utils.err("blks.length = %j", blks.length)
//utils.err("blks.length = %j", blks.length)
if (USE_ASYNC) {
utils.err("USING ASYNC")
var i = 0
async.whilst(
/*test*/
function() { return i < blks.length }
/*body*/
, function(loop){
var loadp = bf.load(blks[i].hdl)
loadp.spread(
function(buf, hdl){
var siz, str
var i = 0
async.whilst(
/*test*/
function() { return i < blks.length }
/*body*/
, function(loop){
bf.load(blks[i].hdl, function(err, buf, hdl){
if (err) { loop(err); return }
siz = buf.readUInt32BE(0)
expect(siz).to.equal(blks[i].siz)
var siz, str
str = buf.toString('utf8', 4, 4+siz)
expect(str).to.equal(blks[i].str)
siz = buf.readUInt32BE(0)
expect(siz).to.equal(blks[i].siz)
i += 1
loop()
}
, loop)
}
/*results*/
, function(err){ done(err) })
}
else {
var d = Y.defer()
, p = d.promise
blks.forEach(function(blk, i){
p = p.then(function(r){
//console.log("r = %j", r)
//console.log("i = %j", i)
//console.log("bf.load(blks[%j].hdl)", r)
bf.load(blk.hdl)
.spread(function(buf, hdl){
var siz, str
str = buf.toString('utf8', 4, 4+siz)
expect(str).to.equal(blks[i].str)
siz = buf.readUInt32BE(0)
expect(siz).to.equal(blk.siz)
i += 1
loop()
})
}
/*results*/
, function(err){ done(err) })
str = buf.toString('utf8', 4, 4+siz)
expect(str).to.equal(blk.str)
})
}) //it("Read all blks.length blks[i].hdl"
return r+1
})
})
p.then(function(){ done() }, done)
d.resolve(0)
}
})
it("bf.close()", function(done){
//bf.close(done)
bf.close().then(done, done)
bf.close(done)
})

@@ -414,3 +333,3 @@

describe("Write the stats aut to "+outputFN, function(){
describe("Write the stats out to "+outputFN, function(){
it("should dump BlockFile.STATS", function(done){

@@ -422,3 +341,2 @@ fs.writeFile(outputFN, BlockFile.STATS.toString({values:"both"})+"\n"

//describe("", function(){})
})
}) //describe("BlockFile w/alternative metaProps",
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc