@tokenizer/range
Advanced tools
Comparing version 0.2.2 to 0.3.0
/// <reference types="node" /> | ||
import { AbstractTokenizer } from 'strtok3/lib/AbstractTokenizer'; | ||
import { IContentRangeType, IHeadRequestInfo, IRangeRequestClient } from './types'; | ||
import { IReadChunkOptions } from 'strtok3/lib/types'; | ||
/** | ||
@@ -18,19 +19,14 @@ * RangeRequestTokenizer is tokenizer which is an adapter for range-request clients. | ||
* Read portion from stream | ||
* @param {Buffer} buffer: Target buffer | ||
* @param {number} offset: Offset is the offset in the buffer to start writing at; if not provided, start at 0 | ||
* @param {number} length: The number of bytes to read, of not provided the buffer length will be used | ||
* @param {number} position: Position where to begin reading from in the file. If position is not defined, data will be read from the current file position. | ||
* @returns {Promise<number>} | ||
* @param buffer - Target buffer | ||
* @param options - Additional read options | ||
* @returns Promise with number of bytes read | ||
*/ | ||
readBuffer(buffer: Buffer, offset?: number, length?: number, position?: number): Promise<number>; | ||
readBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>; | ||
/** | ||
* Peek (read ahead) buffer from tokenizer | ||
* @param buffer - Target buffer to fill with data peek from the tokenizer-stream | ||
* @param offset - The offset in the buffer to start writing at; if not provided, start at 0 | ||
* @param length - is an integer specifying the number of bytes to read | ||
* @param position is an integer specifying where to begin reading from in the file. If position is null, data will be read from the current file position. | ||
* @param maybeless - If set, will not throw an EOF error if not all of the requested data could be read | ||
* @param options - Additional read options | ||
* @returns Promise with number of bytes read | ||
*/ | ||
peekBuffer(buffer: Buffer, offset?: number, length?: number, position?: number): Promise<number>; | ||
peekBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>; | ||
/** | ||
@@ -37,0 +33,0 @@ * Ignore number of bytes, advances the pointer in under tokenizer-stream. |
@@ -26,20 +26,11 @@ "use strict"; | ||
* Read portion from stream | ||
* @param {Buffer} buffer: Target buffer | ||
* @param {number} offset: Offset is the offset in the buffer to start writing at; if not provided, start at 0 | ||
* @param {number} length: The number of bytes to read, of not provided the buffer length will be used | ||
* @param {number} position: Position where to begin reading from in the file. If position is not defined, data will be read from the current file position. | ||
* @returns {Promise<number>} | ||
* @param buffer - Target buffer | ||
* @param options - Additional read options | ||
* @returns Promise with number of bytes read | ||
*/ | ||
async readBuffer(buffer, offset = 0, length = buffer.length, position) { | ||
if (position) { | ||
this.position = position; | ||
async readBuffer(buffer, options) { | ||
if (options && options.position) { | ||
this.position = options.position; | ||
} | ||
debug(`readBuffer position=${this.position} length=${length}`); | ||
if (length === 0) { | ||
return 0; | ||
} | ||
if (!length) { | ||
length = buffer.length; | ||
} | ||
await this.peekBuffer(buffer, offset, length, this.position); | ||
const length = await this.peekBuffer(buffer, options); | ||
this.position += length; | ||
@@ -51,9 +42,26 @@ return length; | ||
* @param buffer - Target buffer to fill with data peek from the tokenizer-stream | ||
* @param offset - The offset in the buffer to start writing at; if not provided, start at 0 | ||
* @param length - is an integer specifying the number of bytes to read | ||
* @param position is an integer specifying where to begin reading from in the file. If position is null, data will be read from the current file position. | ||
* @param maybeless - If set, will not throw an EOF error if not all of the requested data could be read | ||
* @param options - Additional read options | ||
* @returns Promise with number of bytes read | ||
*/ | ||
peekBuffer(buffer, offset = 0, length = buffer.length, position = this.position) { | ||
async peekBuffer(buffer, options) { | ||
let length = buffer.length; | ||
let offset = 0; | ||
let position = this.position; | ||
if (options) { | ||
if (options.position) { | ||
position = options.position; | ||
} | ||
if (Number.isInteger(options.offset)) { | ||
offset = options.offset; | ||
} | ||
if (options.length) { | ||
length = options.length; | ||
} | ||
else { | ||
length -= offset; | ||
} | ||
} | ||
if (length === 0) { | ||
return 0; | ||
} | ||
debug(`peekBuffer position=${position} length=${length}`); | ||
@@ -60,0 +68,0 @@ const lastPos = position + length - 1; |
{ | ||
"name": "@tokenizer/range", | ||
"version": "0.2.2", | ||
"version": "0.3.0", | ||
"description": "Range-request tokenizer adapter", | ||
@@ -57,3 +57,3 @@ "main": "lib/index.js", | ||
"@types/mocha": "^5.2.7", | ||
"@types/node": "^13.1.2", | ||
"@types/node": "^13.5.0", | ||
"chai": "^4.2.0", | ||
@@ -63,13 +63,13 @@ "coveralls": "^3.0.7", | ||
"fs-extra": "^8.1.0", | ||
"mocha": "^6.2.2", | ||
"music-metadata": "^6.0.1", | ||
"mocha": "^7.0.0", | ||
"music-metadata": "^6.3.1", | ||
"nyc": "^15.0.0", | ||
"ts-loader": "^6.2.1", | ||
"ts-node": "^8.5.2", | ||
"tslint": "^5.20.0", | ||
"typescript": "^3.7.4" | ||
"ts-node": "^8.6.2", | ||
"tslint": "^6.0.0", | ||
"typescript": "^3.7.5" | ||
}, | ||
"dependencies": { | ||
"debug": "^4.1.1", | ||
"strtok3": "^5.0.0" | ||
"strtok3": "^6.0.0" | ||
}, | ||
@@ -76,0 +76,0 @@ "nyc": { |
505
27499
+ Added @tokenizer/token@0.3.0 (transitive)
+ Added peek-readable@4.1.0 (transitive)
+ Added strtok3@6.3.0 (transitive)
- Removed @tokenizer/token@0.1.1 (transitive)
- Removed peek-readable@3.1.4 (transitive)
- Removed strtok3@5.0.2 (transitive)
Updated strtok3@^6.0.0