filereader-stream
Comparing version 0.2.0 to 1.0.0
index.js
@@ -1,95 +1,34 @@
-var inherits = require('inherits')
-var EventEmitter = require('events').EventEmitter
+/* global FileReader */
+var from2 = require('from2')
+var toBuffer = require('typedarray-to-buffer')
-module.exports = FileStream
-function FileStream(file, options) {
-  if (!(this instanceof FileStream))
-    return new FileStream(file, options)
+module.exports = function (file, options) {
   options = options || {}
-  options.output = options.output || 'arraybuffer'
-  this.options = options
-  this._file = file
-  this.readable = true
-  this.offset = options.offset || 0
-  this.paused = false
-  this.chunkSize = this.options.chunkSize || 8128
+  var offset = options.offset || 0
+  var chunkSize = options.chunkSize || 1024 * 1024 // default 1MB chunk has tolerable perf on large files
+  var fileReader = new FileReader(file)
-  var tags = ['name','size','type','lastModifiedDate']
-  tags.forEach(function (thing) {
-    this[thing] = file[thing]
-  }, this)
-}
+  var from = from2(function (size, cb) {
+    if (offset >= file.size) return cb(null, null)
+    fileReader.onloadend = function loaded (event) {
+      var data = event.target.result
+      if (data instanceof ArrayBuffer) data = toBuffer(new Uint8Array(event.target.result))
+      cb(null, data)
+    }
+    var end = offset + chunkSize
+    var slice = file.slice(offset, end)
+    fileReader.readAsArrayBuffer(slice)
+    offset = end
+  })
-FileStream.prototype._FileReader = function() {
-  var self = this
-  var reader = new FileReader()
-  var outputType = this.options.output
+  from.name = file.name
+  from.size = file.size
+  from.type = file.type
+  from.lastModifiedDate = file.lastModifiedDate
-  reader.onloadend = function loaded(event) {
-    var data = event.target.result
-    if (data instanceof ArrayBuffer)
-      data = new Buffer(new Uint8Array(event.target.result))
-    self.dest.write(data)
-    if (self.offset < self._file.size) {
-      self.emit('progress', self.offset)
-      !self.paused && self.readChunk(outputType)
-      return
-    }
-    self._end()
+  fileReader.onerror = function (err) {
+    from.destroy(err)
   }
-  reader.onerror = function(e) {
-    self.emit('error', e.target.error)
-  }
-  return reader
+  return from
 }
-FileStream.prototype.readChunk = function(outputType) {
-  var end = this.offset + this.chunkSize
-  var slice = this._file.slice(this.offset, end)
-  this.offset = end
-  if (outputType === 'binary')
-    this.reader.readAsBinaryString(slice)
-  else if (outputType === 'dataurl')
-    this.reader.readAsDataURL(slice)
-  else if (outputType === 'arraybuffer')
-    this.reader.readAsArrayBuffer(slice)
-  else if (outputType === 'text')
-    this.reader.readAsText(slice)
-}
-FileStream.prototype._end = function() {
-  if (this.dest !== console && (!this.options || this.options.end !== false)) {
-    this.dest.end && this.dest.end()
-    this.dest.close && this.dest.close()
-    this.emit('end', this._file.size)
-  }
-}
-FileStream.prototype.pipe = function pipe(dest, options) {
-  this.reader = this._FileReader()
-  this.readChunk(this.options.output)
-  this.dest = dest
-  return dest
-}
-FileStream.prototype.pause = function() {
-  this.paused = true
-  return this.offset
-}
-FileStream.prototype.resume = function() {
-  this.paused = false
-  this.readChunk(this.options.output)
-}
-FileStream.prototype.abort = function() {
-  this.paused = true
-  this.reader.abort()
-  this._end()
-  return this.offset
-}
-inherits(FileStream, EventEmitter)
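In 1.0.0 the hand-rolled `FileStream`/`EventEmitter` class above is replaced by a from2-based Readable stream, so pausing, resuming, and backpressure come from the standard streams API instead of the custom `pause()`/`resume()`/`progress` machinery. A minimal consumption sketch (assuming `file` is a `File` object from a drop event or an `<input type="file">`; the variable names and timings are illustrative only):

```js
var fileReaderStream = require('filereader-stream')

var stream = fileReaderStream(file, { chunkSize: 1024 * 1024 })

stream.on('data', function (chunk) {
  // each chunk is a Buffer of at most chunkSize bytes
  stream.pause()              // standard Readable backpressure...
  setTimeout(function () {
    stream.resume()           // ...replaces the old custom pause()/resume()
  }, 100)
})

stream.on('end', function () {
  // name and size are copied onto the stream from the File object
  console.log('finished reading', stream.name, '(' + stream.size + ' bytes)')
})

stream.on('error', function (err) {
  console.error('read failed', err)
})
```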
package.json
 {
   "name": "filereader-stream",
-  "version": "0.2.0",
+  "version": "1.0.0",
   "description": "Read an HTML5 File object (from e.g. HTML5 drag and drops) as a stream.",
   "main": "index.js",
   "scripts": {
-    "test": "beefy test.js"
+    "test": "standard && wzrd test.js"
   },
   "author": "max ogden",
-  "license": "BSD 2-Clause",
+  "license": "BSD-2-Clause",
   "repository": {
     "type": "git",
-    "url": "https://github.com/maxogden/filereader-stream.git"
+    "url": "git+https://github.com/maxogden/filereader-stream.git"
   },
@@ -19,12 +19,14 @@ "bugs": {
   "homepage": "https://github.com/maxogden/filereader-stream",
+  "dependencies": {
+    "from2": "^2.1.0",
+    "typedarray-to-buffer": "^3.0.4"
+  },
   "devDependencies": {
-    "browserify": "~3.11.0",
     "drag-and-drop-files": "~0.0.1",
-    "concat-stream": "~1.2.1",
+    "standard": "^5.3.1",
     "tape": "~2.3.2",
+    "browserify": "~3.11.0",
-    "beefy": "~0.6.0"
-  },
-  "dependencies": {
-    "inherits": "1.0.0"
+    "through2": "^2.0.0",
+    "wzrd": "^1.3.1"
   }
 }
README.md
@@ -5,9 +5,6 @@ # filereader-stream
 [![NPM](https://nodei.co/npm/filereader-stream.png)](https://nodei.co/npm/filereader-stream/)
 If you want this for FileLists then definitely check out [fileliststream](http://github.com/brianloveswords/fileliststream).
 # install
-Use it with npm & [browserify >= 3.0](/substack/node-browserify)
+Use it with npm & [browserify](/substack/node-browserify)
@@ -20,5 +17,5 @@ ```bash
 ```js
-var drop = require('drag-and-drop-files');
-var concat = require('concat-stream');
-var createReadStream = require('filereader-stream');
+var drop = require('drag-and-drop-files')
+var concat = require('concat-stream')
+var fileReaderStream = require('filereader-stream')
@@ -28,3 +25,3 @@ test('should read file when one is dropped', function(t) {
   var first = files[0]
-  createReadStream(first).pipe(concat(function(contents) {
+  fileReaderStream(first).pipe(concat(function(contents) {
     // contents is the contents of the entire file
@@ -39,13 +36,12 @@ }))
 ```js
-var createReadStream = require('filereader-stream', [options]);
+var fileReaderStream = require('filereader-stream')
+var readStream = fileReaderStream(file, [options])
 ```
-`options` is optional and can specify `output`. Possible values are:
+`fileReaderStream` is a Streams 2 Readable Stream, so you can do all the streamy things with it like `.pipe` etc.
-* `arraybuffer` [default]
-* `binary`
-* `dataurl`
-* `text`
+`options`:
-You can also specify `chunkSize`, default is `8128`. This is how many bytes will be read and written at a time to the stream you get back for each file.
+* `chunkSize` - default `1024 * 1024` (1MB) - How many bytes will be read at a time
+* `offset` - default `0` - Where in the file to start reading
@@ -52,0 +48,0 @@ # run the tests
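As a worked example of the two documented options, here is a sketch (using `drag-and-drop-files` and `concat-stream` as in the README example; the byte values are arbitrary) that skips the first 1024 bytes of a dropped file and reads the rest in 64 KB slices:

```js
var drop = require('drag-and-drop-files')
var concat = require('concat-stream')
var fileReaderStream = require('filereader-stream')

drop(document.body, function (files) {
  fileReaderStream(files[0], { offset: 1024, chunkSize: 64 * 1024 })
    .pipe(concat(function (contents) {
      // contents holds everything after the first 1024 bytes
      console.log('read', contents.length, 'of', files[0].size, 'bytes')
    }))
})
```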
test.js
@@ -1,44 +1,25 @@
-var test = require('tape');
-var drop = require('drag-and-drop-files');
-var concat = require('concat-stream');
-var frs = require('./');
+var test = require('tape')
+var through = require('through2')
+var drop = require('drag-and-drop-files')
+var frs = require('./')
-drop(document.body, function(files) {
-  var first = files[0],
-      s = frs(first),
-      cursor, paused
+drop(document.body, function (files) {
+  var first = files[0]
+  var LEN = 1024 * 512
+  var s = frs(first, {chunkSize: LEN})
-  test('should read file when one is dropped', function(t) {
-    s.pipe(concat(function(contents) {
-      t.true(contents.length > 0)
-      t.end()
-    }))
-  })
-  s.on('progress', function(offset){
-    if (offset / first.size < 0.5 || paused) return
-    test('should pause when over 30% of the file is read', function(t) {
-      cursor = s.pause()
-      t.true(cursor / first.size >= 0.5)
-      t.end()
-    })
-    test('should resume 2 seconds after pause', function(t) {
-      setTimeout(function(){
-        s.resume()
-        t.true(s.paused === false)
-        t.end()
-      }, 2000)
-    })
-    paused = true
+  test('should read file when one is dropped', function (t) {
+    var buffs = []
+    var concatter = through(function (ch, enc, cb) {
+      buffs.push(ch)
+      t.ok(ch.length <= LEN, 'length is <= ' + LEN)
+      cb()
+    }, function () {
+      var all = Buffer.concat(buffs)
+      t.ok(all.length > 0, 'got some data')
+      t.equal(all.length, s.size, 'size is ' + s.size)
+      t.end()
+    })
+    s.pipe(concatter)
   })
-  s.on('end', function(offset){
-    test('should return correct offset upon end event', function(t) {
-      t.true(first.size === offset)
-      t.end()
-    })
-  })
 })
No v1
Quality: Package is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
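For context on the `^` range warning: caret ranges treat the leftmost non-zero version component as the breaking-change boundary, so below 1.0.0 they only allow patch-level updates. A quick illustration using the standalone `semver` package (not a dependency of this module):

```js
var semver = require('semver')

console.log(semver.satisfies('0.2.5', '^0.2.0')) // true  - patch updates match
console.log(semver.satisfies('0.3.0', '^0.2.0')) // false - 0.x minor bumps do not
console.log(semver.satisfies('1.4.0', '^1.0.0')) // true  - minor updates match once >= 1.0.0
```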
+ Added from2@^2.1.0
+ Added typedarray-to-buffer@^3.0.4
+ Added core-util-is@1.0.3 (transitive)
+ Added from2@2.3.0 (transitive)
+ Added inherits@2.0.4 (transitive)
+ Added is-typedarray@1.0.0 (transitive)
+ Added isarray@1.0.0 (transitive)
+ Added process-nextick-args@2.0.1 (transitive)
+ Added readable-stream@2.3.8 (transitive)
+ Added safe-buffer@5.1.2 (transitive)
+ Added string_decoder@1.1.1 (transitive)
+ Added typedarray-to-buffer@3.1.5 (transitive)
+ Added util-deprecate@1.0.2 (transitive)
- Removed inherits@1.0.0
- Removed inherits@1.0.0 (transitive)