Comparing strtok3 version 9.1.1 to 10.0.0
 import type { ITokenizer, IFileInfo, IReadChunkOptions, ITokenizerOptions } from './types.js';
 import type { IGetToken, IToken } from '@tokenizer/token';
 interface INormalizedReadChunkOptions extends IReadChunkOptions {
-    offset: number;
     length: number;
@@ -6,0 +5,0 @@ position: number;
@@ -93,18 +93,12 @@ import { EndOfStreamError } from 'peek-readable';
     normalizeOptions(uint8Array, options) {
-        if (options && options.position !== undefined && options.position < this.position) {
+        if (!this.supportsRandomAccess() && options && options.position !== undefined && options.position < this.position) {
             throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
         }
-        if (options) {
-            return {
-                mayBeLess: options.mayBeLess === true,
-                offset: options.offset ? options.offset : 0,
-                length: options.length ? options.length : (uint8Array.length - (options.offset ? options.offset : 0)),
-                position: options.position ? options.position : this.position
-            };
-        }
         return {
-            mayBeLess: false,
-            offset: 0,
-            length: uint8Array.length,
-            position: this.position
+            ...{
+                mayBeLess: false,
+                offset: 0,
+                length: uint8Array.length,
+                position: this.position
+            }, ...options
         };
@@ -111,0 +105,0 @@ }
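
The hunk above relaxes the position check: only tokenizers without random access still reject an `options.position` smaller than the current `tokenizer.position`, and the option defaults are now applied with an object spread. A minimal sketch (not part of the diff) of what 10.0.0 permits, assuming a Node.js file tokenizer; the file name and byte counts are illustrative only:

import { fromFile } from 'strtok3';

// Illustrative file; any tokenizer reporting supportsRandomAccess() === true behaves the same.
const tokenizer = await fromFile('example.bin');

// Advance the read position by consuming the first 16 bytes.
await tokenizer.readBuffer(new Uint8Array(16));

// Peek back at the start of the file.
// 9.1.1: throws '`options.position` must be equal or greater than `tokenizer.position`'.
// 10.0.0: allowed, because the file tokenizer supports random access.
const header = new Uint8Array(8);
await tokenizer.peekBuffer(header, { position: 0 });

await tokenizer.close();
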
@@ -43,3 +43,3 @@ import { EndOfStreamError } from 'peek-readable';
         }
-        uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
+        uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read));
         return bytes2read;
@@ -46,0 +46,0 @@ }
@@ -30,3 +30,3 @@ import { AbstractTokenizer } from './AbstractTokenizer.js';
             return 0;
-        const res = await this.fileHandle.read(uint8Array, normOptions.offset, normOptions.length, normOptions.position);
+        const res = await this.fileHandle.read(uint8Array, 0, normOptions.length, normOptions.position);
         this.position += res.bytesRead;
@@ -46,3 +46,3 @@ if (res.bytesRead < normOptions.length && (!options || !options.mayBeLess)) {
         const normOptions = this.normalizeOptions(uint8Array, options);
-        const res = await this.fileHandle.read(uint8Array, normOptions.offset, normOptions.length, normOptions.position);
+        const res = await this.fileHandle.read(uint8Array, 0, normOptions.length, normOptions.position);
         if ((!normOptions.mayBeLess) && res.bytesRead < normOptions.length) {
@@ -49,0 +49,0 @@ throw new EndOfStreamError();
@@ -34,3 +34,3 @@ import { AbstractTokenizer } from './AbstractTokenizer.js';
         }
-        const bytesRead = await this.streamReader.read(uint8Array, normOptions.offset, normOptions.length);
+        const bytesRead = await this.streamReader.read(uint8Array, 0, normOptions.length);
         this.position += bytesRead;
@@ -56,3 +56,3 @@ if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
         bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
-        uint8Array.set(skipBuffer.subarray(skipBytes), normOptions.offset);
+        uint8Array.set(skipBuffer.subarray(skipBytes));
         return bytesRead - skipBytes;
@@ -66,3 +66,3 @@ }
         try {
-            bytesRead = await this.streamReader.peek(uint8Array, normOptions.offset, normOptions.length);
+            bytesRead = await this.streamReader.peek(uint8Array, 0, normOptions.length);
         }
@@ -69,0 +69,0 @@ catch (err) {
@@ -28,6 +28,2 @@ import type { IGetToken } from '@tokenizer/token';
-    /**
-     * The offset in the buffer to start writing at; default is 0
-     */
-    offset?: number;
     /**
      * Number of bytes to read.
@@ -34,0 +30,0 @@ */
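
Together with the implementation changes above, this hunk removes the `offset` member (and its doc comment) from `IReadChunkOptions`: read and peek results are now always written at index 0 of the supplied `Uint8Array`. A hedged migration sketch (not part of the diff) for callers that relied on the old option; the buffer contents and sizes are illustrative only:

import { fromBuffer } from 'strtok3';

// Illustrative input; any ITokenizer behaves the same way here.
const tokenizer = fromBuffer(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]));
const target = new Uint8Array(8);

// strtok3 9.x: write 4 bytes into `target` starting at index 2.
// await tokenizer.readBuffer(target, { offset: 2, length: 4 });

// strtok3 10.x: the `offset` option is gone; pass a subarray view instead.
// The subarray shares memory with `target`, so the bytes still land at index 2.
await tokenizer.readBuffer(target.subarray(2), { length: 4 });
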
 {
   "name": "strtok3",
-  "version": "9.1.1",
+  "version": "10.0.0",
   "description": "A promise based streaming tokenizer",
@@ -5,0 +5,0 @@ "author": {
@@ -296,3 +296,2 @@ [![Node.js CI](https://github.com/Borewit/strtok3/actions/workflows/nodejs-ci.yml/badge.svg?branch=master)](https://github.com/Borewit/strtok3/actions/workflows/nodejs-ci.yml?query=branch%3Amaster)
 |-----------|---------|----------------------------------------------------------------------------|
-| offset    | number  | The offset in the buffer to start writing at; if not provided, start at 0   |
 | length    | number  | Requested number of bytes to read.                                           |
@@ -299,0 +298,0 @@ | position | number | Position where to peek from the file. If position is null, data will be read from the [current file position](#attribute-tokenizerposition). Position may not be less then [tokenizer.position](#attribute-tokenizerposition) |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package