@tokenizer/range
Comparing version 0.6.0 to 0.7.0
lib/chunked-file-data.d.ts
@@ -18,3 +18,3 @@ export interface IChunk {
  hasDataRange(offsetStart: number, offsetEnd: number): boolean;
- readToBuffer(buffer: Buffer, offset: number, position: number, length: number): number;
+ readToBuffer(buffer: Uint8Array, offset: number, position: number, length: number): number;
  private _concatData;
@@ -30,3 +30,2 @@ /**
  private _getChunkRange;
- getByteAt(offset: number): number;
  }
lib/chunked-file-data.js
@@ -1,4 +0,1 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ChunkedFileData = void 0;
  const NOT_FOUND = -1;
@@ -11,3 +8,3 @@ /**
  */
- class ChunkedFileData {
+ export class ChunkedFileData {
  constructor() {
@@ -74,3 +71,3 @@ this._fileData = [];
  let chunkLength = Math.min(length, dataChunk.data.byteLength - chunkOffset);
- Buffer.from(dataChunk.data).copy(buffer, offset, chunkOffset, chunkOffset + chunkLength);
+ buffer.set(dataChunk.data.subarray(chunkOffset, chunkOffset + chunkLength), offset);
  if (chunkLength < length) {
@@ -85,3 +82,3 @@ chunkLength += this.readToBuffer(buffer, offset + chunkLength, position + chunkLength, length - chunkLength);
  _concatData(buffer1, buffer2) {
- const tmp = Buffer.alloc(buffer1.byteLength + buffer2.byteLength);
+ const tmp = new Uint8Array(buffer1.byteLength + buffer2.byteLength);
  tmp.set(new Uint8Array(buffer1), 0);
@@ -148,12 +145,3 @@ tmp.set(new Uint8Array(buffer2), buffer1.byteLength);
  }
- getByteAt(offset) {
- const buf = Buffer.alloc(1);
- const bytesRead = this.readToBuffer(buf, 0, offset, 1);
- if (bytesRead < 1) {
- throw new Error('Offset ' + offset + " hasn't been loaded yet.");
- }
- return buf[0];
- }
  }
- exports.ChunkedFileData = ChunkedFileData;
  //# sourceMappingURL=chunked-file-data.js.map
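The compiled ChunkedFileData no longer relies on Node's Buffer: the copy in readToBuffer is now done with Uint8Array subarray() and set(), and _concatData allocates a plain Uint8Array. A minimal sketch of the equivalence (the variable names below are illustrative, not taken from the package):

// Copy `len` bytes from `source` (starting at srcOffset) into `target` at dstOffset.
// Old (Node-only):   Buffer.from(source).copy(target, dstOffset, srcOffset, srcOffset + len);
// New (any runtime): target.set(source.subarray(srcOffset, srcOffset + len), dstOffset);
const source = new Uint8Array([1, 2, 3, 4, 5]);
const target = new Uint8Array(8);
target.set(source.subarray(1, 4), 2); // target is now [0, 0, 2, 3, 4, 0, 0, 0]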
lib/index.d.ts
@@ -1,5 +0,5 @@
- import { IRangeRequestClient, IRangeRequestConfig } from './types';
- import { ITokenizer } from 'strtok3/lib/core';
- export { IRangeRequestClient, IRangeRequestResponse, IContentRangeType, IHeadRequestInfo, IRangeRequestConfig } from './types';
- export { parseContentRange } from './range-request-tokenizer';
+ import type { IRangeRequestClient, IRangeRequestConfig } from './types.js';
+ import { type ITokenizer } from 'strtok3';
+ export type { IRangeRequestClient, IRangeRequestResponse, IContentRangeType, IHeadRequestInfo, IRangeRequestConfig } from './types.js';
+ export { parseContentRange } from './range-request-tokenizer.js';
  /**
@@ -6,0 +6,0 @@ * Construct range-tokenizer from range-request-client and config
lib/index.js
@@ -1,8 +0,3 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.parseContentRange = void 0;
- exports.tokenizer = tokenizer;
- const range_request_factory_1 = require("./range-request-factory");
- var range_request_tokenizer_1 = require("./range-request-tokenizer");
- Object.defineProperty(exports, "parseContentRange", { enumerable: true, get: function () { return range_request_tokenizer_1.parseContentRange; } });
+ import { RangeRequestFactory } from './range-request-factory.js';
+ export { parseContentRange } from './range-request-tokenizer.js';
  /**
@@ -14,6 +9,6 @@ * Construct range-tokenizer from range-request-client and config
  */
- function tokenizer(rangeRequestClient, config) {
- const factory = new range_request_factory_1.RangeRequestFactory(rangeRequestClient);
+ export function tokenizer(rangeRequestClient, config) {
+ const factory = new RangeRequestFactory(rangeRequestClient);
  return factory.initTokenizer(config);
  }
  //# sourceMappingURL=index.js.map
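Since the entry point is now ESM with named exports, constructing a tokenizer looks as follows. A minimal usage sketch, assuming a user-supplied client (myRangeClient is a hypothetical object implementing IRangeRequestClient, and avoidHeadRequests is taken to be the optional flag surfaced by IInternalRangeRequestConfig above):

import { tokenizer } from '@tokenizer/range';
import type { IRangeRequestClient } from '@tokenizer/range';

declare const myRangeClient: IRangeRequestClient; // hypothetical client implementation

// tokenizer() wraps the client in a RangeRequestFactory and resolves to a RangeRequestTokenizer.
const rangeTokenizer = await tokenizer(myRangeClient, { avoidHeadRequests: false });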
lib/range-request-factory.d.ts
@@ -1,6 +0,11 @@
- import { IRangeRequestConfig, IHeadRequestInfo, IRangeRequestClient } from './types';
- import { RangeRequestTokenizer } from './range-request-tokenizer';
+ import type { IRangeRequestConfig, IHeadRequestInfo, IRangeRequestClient } from './types.js';
+ import { RangeRequestTokenizer } from './range-request-tokenizer.js';
+ interface IInternalRangeRequestConfig extends IRangeRequestConfig {
+ avoidHeadRequests: boolean;
+ initialChunkSize: number;
+ minimumChunkSize: number;
+ }
  export declare class RangeRequestFactory {
  private rangeRequestClient;
- config: IRangeRequestConfig;
+ config: IInternalRangeRequestConfig;
  constructor(rangeRequestClient: IRangeRequestClient);
@@ -16,1 +21,2 @@ initTokenizer(config?: IRangeRequestConfig): Promise<RangeRequestTokenizer>;
  }
+ export {};
lib/range-request-factory.js
@@ -1,8 +0,5 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.RangeRequestFactory = void 0;
- const range_request_tokenizer_1 = require("./range-request-tokenizer");
- const initDebug = require("debug");
+ import { RangeRequestTokenizer, roundRange } from './range-request-tokenizer.js';
+ import initDebug from 'debug';
  const debug = initDebug('range-request-reader');
- class RangeRequestFactory {
+ export class RangeRequestFactory {
  constructor(rangeRequestClient) {
@@ -21,3 +18,3 @@ this.rangeRequestClient = rangeRequestClient;
  const headRequestInfo = await this.getHeadRequestInfo();
- return new range_request_tokenizer_1.RangeRequestTokenizer(this.rangeRequestClient, headRequestInfo, this.config.minimumChunkSize);
+ return new RangeRequestTokenizer(this.rangeRequestClient, headRequestInfo, this.config.minimumChunkSize);
  }
@@ -34,7 +31,9 @@ /**
  async fetchFileInfoWithHeadRequest() {
- debug(`_fetchSizeWithHeadRequest()`);
- const info = await this.rangeRequestClient.getHeadInfo();
- if (info.size) {
- debug(`MIME-type=${info.mimeType}, content-length=${info.size}`);
- return info;
+ debug('_fetchSizeWithHeadRequest()');
+ if (this.rangeRequestClient.getHeadInfo) {
+ const info = await this.rangeRequestClient.getHeadInfo();
+ if (info.size) {
+ debug(`MIME-type=${info.mimeType}, content-length=${info.size}`);
+ return info;
+ }
  }
@@ -47,3 +46,3 @@ // Content-Length not provided by the server, fallback to
  async fetchFileInfoWithGetRequest() {
- const range = (0, range_request_tokenizer_1.roundRange)([0, this.config.initialChunkSize], this.config.minimumChunkSize);
+ const range = roundRange([0, this.config.initialChunkSize], this.config.minimumChunkSize);
  const response = await this.rangeRequestClient.getResponse('GET', range);
@@ -57,3 +56,2 @@ debug(`_fetchSizeWithGetRequest response: contentRange=${response.contentRange}`);
  }
- exports.RangeRequestFactory = RangeRequestFactory;
  //# sourceMappingURL=range-request-factory.js.map
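fetchFileInfoWithHeadRequest now checks `if (this.rangeRequestClient.getHeadInfo)` before calling it, so a client may omit getHeadInfo entirely; the factory then falls back to fetchFileInfoWithGetRequest, which issues a ranged GET and derives the size from the Content-Range header. A rough sketch of such a GET-only client (the fetch wiring, URL, and header handling are assumptions for illustration, not code from this package):

import { parseContentRange } from '@tokenizer/range';

// Illustrative fetch-based client without getHeadInfo(), shaped to what the factory calls.
const httpClient = {
  async getResponse(method: string, range: [number, number]) {
    const response = await fetch('https://example.com/file.mp3', { // placeholder URL
      method,
      headers: { Range: `bytes=${range[0]}-${range[1]}` }
    });
    const contentRange = parseContentRange(response.headers.get('Content-Range'));
    return {
      size: contentRange.instanceLength,                            // total length from Content-Range
      mimeType: response.headers.get('Content-Type') ?? undefined,
      contentRange,
      arrayBuffer: async () => new Uint8Array(await response.arrayBuffer())
    };
  }
};

Such an object can then be passed to tokenizer() as in the earlier sketch.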
lib/range-request-tokenizer.d.ts
@@ -1,7 +0,6 @@
- import { AbstractTokenizer } from 'strtok3/lib/AbstractTokenizer';
- import { IContentRangeType, IHeadRequestInfo, IRangeRequestClient } from './types';
- import { IReadChunkOptions } from 'strtok3/lib/types';
+ import { AbstractTokenizer, type IReadChunkOptions } from 'strtok3';
+ import type { IContentRangeType, IHeadRequestInfo, IRangeRequestClient } from './types.js';
  /**
  * RangeRequestTokenizer is tokenizer which is an adapter for range-request clients.
- * Typically HTTP clients implementing the HTTP Range Requests (https://tools.ietf.org/html/rfc7233)
+ * Typically, HTTP clients implementing the HTTP Range Requests (https://tools.ietf.org/html/rfc7233)
  *
@@ -18,14 +17,14 @@ * Inspired by "XHR Reader" written by António Afonso
  * Read portion from stream
- * @param buffer - Target buffer
+ * @param uint8array - Target `Uint8Array`
  * @param options - Additional read options
  * @returns Promise with number of bytes read
  */
- readBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
+ readBuffer(uint8array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
  /**
  * Peek (read ahead) buffer from tokenizer
- * @param buffer - Target buffer to fill with data peek from the tokenizer-stream
+ * @param uint8array - Target `Uint8Array` to fill with data peek from the tokenizer-stream
  * @param options - Additional read options
  * @returns Promise with number of bytes read
  */
- peekBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
+ peekBuffer(uint8array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
  /**
@@ -44,2 +43,2 @@ * Ignore number of bytes, advances the pointer in under tokenizer-stream.
  */
- export declare function parseContentRange(contentRange: string): IContentRangeType;
+ export declare function parseContentRange(contentRange: string | null): IContentRangeType;
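Both readBuffer and peekBuffer now accept any Uint8Array as the target, so callers no longer need Node's Buffer. A short sketch, reusing the rangeTokenizer from the tokenizer() example above (the 10-byte read is just an illustration; the position option comes from strtok3's IReadChunkOptions):

const header = new Uint8Array(10);                        // plain Uint8Array target, no Buffer
await rangeTokenizer.peekBuffer(header, { position: 0 }); // read ahead without advancing
await rangeTokenizer.readBuffer(header, { position: 0 }); // read and advance the position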
lib/range-request-tokenizer.js
@@ -1,13 +0,8 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.RangeRequestTokenizer = void 0;
- exports.roundRange = roundRange;
- exports.parseContentRange = parseContentRange;
- const initDebug = require("debug");
- const AbstractTokenizer_1 = require("strtok3/lib/AbstractTokenizer");
- const chunked_file_data_1 = require("./chunked-file-data");
+ import { AbstractTokenizer } from 'strtok3';
+ import { ChunkedFileData } from './chunked-file-data.js';
+ import initDebug from 'debug';
  const debug = initDebug('range-request-reader');
  /**
  * RangeRequestTokenizer is tokenizer which is an adapter for range-request clients.
- * Typically HTTP clients implementing the HTTP Range Requests (https://tools.ietf.org/html/rfc7233)
+ * Typically, HTTP clients implementing the HTTP Range Requests (https://tools.ietf.org/html/rfc7233)
  *
@@ -17,5 +12,5 @@ * Inspired by "XHR Reader" written by António Afonso
  */
- class RangeRequestTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
+ export class RangeRequestTokenizer extends AbstractTokenizer {
  constructor(rangeRequestClient, fileInfo, minimumChunkSize) {
- super(fileInfo);
+ super({ fileInfo });
  this.rangeRequestClient = rangeRequestClient;
@@ -26,15 +21,15 @@ this.minimumChunkSize = minimumChunkSize;
  }
- this._fileData = new chunked_file_data_1.ChunkedFileData();
+ this._fileData = new ChunkedFileData();
  }
  /**
  * Read portion from stream
- * @param buffer - Target buffer
+ * @param uint8array - Target `Uint8Array`
  * @param options - Additional read options
  * @returns Promise with number of bytes read
  */
- async readBuffer(buffer, options) {
+ async readBuffer(uint8array, options) {
  if (options && options.position) {
  this.position = options.position;
  }
- const length = await this.peekBuffer(buffer, options);
+ const length = await this.peekBuffer(uint8array, options);
  this.position += length;
@@ -45,8 +40,8 @@ return length;
  * Peek (read ahead) buffer from tokenizer
- * @param buffer - Target buffer to fill with data peek from the tokenizer-stream
+ * @param uint8array - Target `Uint8Array` to fill with data peek from the tokenizer-stream
  * @param options - Additional read options
  * @returns Promise with number of bytes read
  */
- async peekBuffer(buffer, options) {
- let length = buffer.length;
+ async peekBuffer(uint8array, options) {
+ let length = uint8array.length;
  let offset = 0;
@@ -72,5 +67,7 @@ let position = this.position;
  debug(`peekBuffer position=${position} length=${length}`);
+ if (!this.fileInfo)
+ throw new Error('File-info missing');
  const lastPos = Math.min(this.fileInfo.size - 1, position + length - 1);
  return this.loadRange([position, lastPos]).then(() => {
- this._fileData.readToBuffer(buffer, offset, position, Math.min(this.fileInfo.size, length));
+ this._fileData.readToBuffer(uint8array, offset, position, Math.min(this.fileInfo.size, length));
  return length;
@@ -94,3 +91,3 @@ });
  async loadRange(range) {
- if (range[0] > this.fileInfo.size - 1) {
+ if (range[0] > (this.fileInfo.size - 1)) {
  throw new Error('End-Of-File');
@@ -101,4 +98,4 @@ }
  if (this._fileData.hasDataRange(range[0], range[1])) {
- debug(`Read from cache`);
- return Promise.resolve();
+ debug('Read from cache');
+ return;
  }
@@ -120,4 +117,3 @@ // Always download in multiples of CHUNK_SIZE. If we're going to make a
  }
- exports.RangeRequestTokenizer = RangeRequestTokenizer;
- function roundRange(range, minimumChunkSize) {
+ export function roundRange(range, minimumChunkSize) {
  const length = range[1] - range[0] + 1;
@@ -132,16 +128,17 @@ // const newLength = Math.ceil(length / minimum_chunk_size) * minimum_chunk_size;
  */
- function parseContentRange(contentRange) {
+ export function parseContentRange(contentRange) {
+ if (!contentRange) {
+ throw new Error('Content range must be provided');
+ }
  debug(`_parseContentRang response: contentRange=${contentRange}`);
- if (contentRange) {
- const parsedContentRange = contentRange.match(/bytes (\d+)-(\d+)\/(?:(\d+)|\*)/i);
- if (!parsedContentRange) {
- throw new Error('FIXME: Unknown Content-Range syntax: ' + contentRange);
- }
- return {
- firstBytePosition: parseInt(parsedContentRange[1], 10),
- lastBytePosition: parseInt(parsedContentRange[2], 10),
- instanceLength: parsedContentRange[3] ? parseInt(parsedContentRange[3], 10) : null
- };
+ const parsedContentRange = contentRange.match(/bytes (\d+)-(\d+)\/(?:(\d+)|\*)/i);
+ if (!parsedContentRange) {
+ throw new Error('FIXME: Unknown Content-Range syntax: ' + contentRange);
+ }
+ return {
+ firstBytePosition: parseInt(parsedContentRange[1], 10),
+ lastBytePosition: parseInt(parsedContentRange[2], 10),
+ instanceLength: parsedContentRange[3] ? parseInt(parsedContentRange[3], 10) : undefined
+ };
  }
  //# sourceMappingURL=range-request-tokenizer.js.map
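parseContentRange now accepts string | null (matching what Headers.get() returns), throws when the header is missing, and reports an unknown total length as undefined instead of null. Based on the regular expression above, a few illustrative calls:

parseContentRange('bytes 0-65535/131072');
// => { firstBytePosition: 0, lastBytePosition: 65535, instanceLength: 131072 }

parseContentRange('bytes 0-65535/*');
// => { firstBytePosition: 0, lastBytePosition: 65535, instanceLength: undefined }

parseContentRange(null);
// => throws Error('Content range must be provided')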
lib/types.d.ts
@@ -1,2 +0,2 @@
- import { IFileInfo } from 'strtok3/lib/core';
+ import type { IFileInfo } from 'strtok3';
  export interface IRangeRequestConfig {
@@ -32,3 +32,3 @@ timeoutInSec?: number;
  export interface IRangeRequestResponse extends IHeadRequestInfo {
- arrayBuffer: () => Promise<Buffer>;
+ arrayBuffer: () => Promise<Uint8Array>;
  }
@@ -35,0 +35,0 @@ /**
lib/types.js
@@ -1,3 +0,2 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
+ export {};
  //# sourceMappingURL=types.js.map
package.json
  {
  "name": "@tokenizer/range",
- "version": "0.6.0",
+ "version": "0.7.0",
  "description": "Range-request tokenizer adapter",
- "main": "lib/index.js",
+ "type": "module",
+ "exports": "./lib/index.js",
  "types": "lib/index.d.ts",
@@ -20,3 +21,3 @@ "files": [
  "engines": {
- "node": ">=14"
+ "node": ">=16"
  },
@@ -55,21 +56,32 @@ "funding": {
  "build": "npm run clean && yarn run compile",
- "lint": "tslint 'lib/**/*.ts' --exclude 'lib/**/*.d.ts' 'test/**/*.ts' --exclude 'test/**/*.d.ts'",
- "test": "mocha --require ts-node/register --require source-map-support/register --full-trace test/*.test.ts",
+ "eslint": "eslint lib test",
+ "lint": "npx eslint",
+ "test": "mocha",
  "test-node": "cd node && yarn install && yarn test-data",
- "test-coverage": "nyc npm run test"
+ "test-coverage": "c8 npm run test",
+ "send-codacy": "c8 report --reporter=text-lcov | codacy-coverage"
  },
  "devDependencies": {
- "@types/chai": "^4.3.4",
- "@types/fs-extra": "^11.0.4",
- "@types/mocha": "^8.2.0",
- "@types/node": "^20.14.10",
- "chai": "^4.3.7",
+ "@eslint/compat": "^1.1.1",
+ "@types/chai": "^4.3.16",
+ "@types/debug": "^4.1.12",
+ "@types/mocha": "^10.0.7",
+ "@types/node": "^20.14.11",
+ "@typescript-eslint/eslint-plugin": "^7.16.1",
+ "@typescript-eslint/parser": "^7.16.1",
+ "c8": "^10.1.2",
+ "chai": "^5.1.1",
  "del-cli": "^5.1.0",
  "fs-extra": "^11.2.0",
- "mocha": "^9.2.2",
- "music-metadata": "^7.14.0",
- "nyc": "^17.0.0",
+ "eslint": "^9.7.0",
+ "eslint-config-prettier": "^9.1.0",
+ "eslint-import-resolver-typescript": "^3.6.1",
+ "eslint-plugin-import": "^2.29.1",
+ "eslint-plugin-jsdoc": "^48.8.3",
+ "eslint-plugin-node": "^11.1.0",
+ "eslint-plugin-unicorn": "^54.0.0",
+ "global": "^4.4.0",
+ "mocha": "^10.7.0",
+ "music-metadata": "^10.0.0",
  "ts-loader": "^9.5.1",
  "ts-node": "^10.9.2",
- "tslint": "^6.1.3",
  "typescript": "^5.5.3"
@@ -79,20 +91,5 @@ },
  "debug": "^4.3.5",
- "strtok3": "^6.3.0"
+ "strtok3": "^8.0.1"
  },
- "nyc": {
- "exclude": [
- "test/**/*.ts",
- "src/**/*.js"
- ],
- "extension": [
- ".ts"
- ],
- "sourceMap": true,
- "instrument": true,
- "reporter": [
- "lcov",
- "text"
- ],
- "report-dir": "coverage"
- }
+ "packageManager": "yarn@4.3.1"
  }
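With "type": "module" and an "exports" entry point, 0.7.0 is published as ESM only (Node.js >= 16), so it can no longer be loaded with require(). A minimal sketch of both consumption paths:

// ESM (recommended)
import { tokenizer, parseContentRange } from '@tokenizer/range';

// CommonJS cannot require() an ESM-only package; use a dynamic import() instead, e.g.:
// const { tokenizer } = await import('@tokenizer/range'); // inside an async function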
License Policy Violation: this package is not allowed per your license policy. Review the package's license to ensure compliance. Found 1 instance in 1 package.
+ Added peek-readable@5.3.1 (transitive)
+ Added strtok3@8.1.0 (transitive)
- Removed peek-readable@4.1.0 (transitive)
- Removed strtok3@6.3.0 (transitive)
Updated strtok3@^8.0.1