Socket
Socket
Sign in · Demo · Install

strtok3

Package Overview
Dependencies
Maintainers
1
Versions
98
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

strtok3 - npm Package Compare versions

Comparing version 7.0.0-alpha.3 to 7.0.0-alpha.4

9

lib/AbstractTokenizer.d.ts
import { ITokenizer, IFileInfo, IReadChunkOptions } from './types';
import { IGetToken, IToken } from '@tokenizer/token';
interface INormalizedReadChunkOptions extends IReadChunkOptions {
offset: number;
length: number;
position: number;
mayBeLess?: boolean;
}
/**

@@ -61,3 +67,4 @@ * Core tokenizer

close(): Promise<void>;
protected normalizeOptions(uint8Array: Uint8Array, options?: IReadChunkOptions): IReadChunkOptions;
protected normalizeOptions(uint8Array: Uint8Array, options?: IReadChunkOptions): INormalizedReadChunkOptions;
}
export {};

36

lib/AbstractTokenizer.js

@@ -68,11 +68,11 @@ import { EndOfStreamError } from 'peek-readable';

async ignore(length) {
const bytesLeft = this.fileInfo.size - this.position;
if (length <= bytesLeft) {
this.position += length;
return length;
if (this.fileInfo.size !== undefined) {
const bytesLeft = this.fileInfo.size - this.position;
if (length > bytesLeft) {
this.position += bytesLeft;
return bytesLeft;
}
}
else {
this.position += bytesLeft;
return bytesLeft;
}
this.position += length;
return length;
}

@@ -83,10 +83,20 @@ async close() {

normalizeOptions(uint8Array, options) {
options = {
if (options && options.position !== undefined && options.position < this.position) {
throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
}
if (options) {
return {
mayBeLess: options.mayBeLess === true,
offset: options.offset ? options.offset : 0,
length: options.length ? options.length : (uint8Array.length - (options.offset ? options.offset : 0)),
position: options.position ? options.position : this.position
};
}
return {
mayBeLess: false,
offset: 0,
length: uint8Array.length - ((options && options.offset) ? options.offset : 0),
position: this.position,
...options
length: uint8Array.length,
position: this.position
};
return options;
}
}

@@ -38,9 +38,9 @@ import { EndOfStreamError } from 'peek-readable';

async peekBuffer(uint8Array, options) {
options = this.normalizeOptions(uint8Array, options);
const bytes2read = Math.min(this.uint8Array.length - options.position, options.length);
if ((!options.mayBeLess) && bytes2read < options.length) {
const normOptions = this.normalizeOptions(uint8Array, options);
const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
throw new EndOfStreamError();
}
else {
uint8Array.set(this.uint8Array.subarray(options.position, options.position + bytes2read), options.offset);
uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
return bytes2read;

@@ -47,0 +47,0 @@ }

/// <reference types="node" />
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import * as Stream from 'node:stream';
import { Readable } from 'node:stream';
import { BufferTokenizer } from './BufferTokenizer.js';
import { IFileInfo } from './types.js';
export { EndOfStreamError } from 'peek-readable';
export { ITokenizer, IFileInfo } from './types';
export { ITokenizer, IFileInfo } from './types.js';
export { IToken, IGetToken } from '@tokenizer/token';

@@ -16,3 +16,3 @@ /**

*/
export declare function fromStream(stream: Stream.Readable, fileInfo?: IFileInfo): ReadStreamTokenizer;
export declare function fromStream(stream: Readable, fileInfo?: IFileInfo): ReadStreamTokenizer;
/**

@@ -19,0 +19,0 @@ * Construct ReadStreamTokenizer from given Buffer.

@@ -1,2 +0,1 @@

/// <reference types="node" />
import { AbstractTokenizer } from './AbstractTokenizer.js';

@@ -9,7 +8,7 @@ import { IFileInfo, IReadChunkOptions } from './types';

* Read buffer from file
* @param buffer
* @param uint8Array - Uint8Array to write result to
* @param options - Read behaviour options
* @returns Promise number of bytes read
*/
readBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
/**

@@ -16,0 +15,0 @@ * Peek buffer from file

@@ -11,32 +11,12 @@ import { AbstractTokenizer } from './AbstractTokenizer.js';

* Read buffer from file
* @param buffer
* @param uint8Array - Uint8Array to write result to
* @param options - Read behaviour options
* @returns Promise number of bytes read
*/
async readBuffer(buffer, options) {
let offset = 0;
let length = buffer.length;
if (options) {
if (options.position) {
if (options.position < this.position) {
throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
}
this.position = options.position;
}
if (Number.isInteger(options.length)) {
length = options.length;
}
else {
length -= options.offset || 0;
}
if (options.offset) {
offset = options.offset;
}
}
if (length === 0) {
return Promise.resolve(0);
}
const res = await fs.read(this.fd, buffer, offset, length, this.position);
async readBuffer(uint8Array, options) {
const normOptions = this.normalizeOptions(uint8Array, options);
this.position = normOptions.position;
const res = await fs.read(this.fd, uint8Array, normOptions.offset, normOptions.length, normOptions.position);
this.position += res.bytesRead;
if (res.bytesRead < length && (!options || !options.mayBeLess)) {
if (res.bytesRead < normOptions.length && (!options || !options.mayBeLess)) {
throw new EndOfStreamError();

@@ -53,5 +33,5 @@ }

async peekBuffer(uint8Array, options) {
options = this.normalizeOptions(uint8Array, options);
const res = await fs.read(this.fd, uint8Array, options.offset, options.length, options.position);
if ((!options.mayBeLess) && res.bytesRead < options.length) {
const normOptions = this.normalizeOptions(uint8Array, options);
const res = await fs.read(this.fd, uint8Array, normOptions.offset, normOptions.length, normOptions.position);
if ((!normOptions.mayBeLess) && res.bytesRead < normOptions.length) {
throw new EndOfStreamError();

@@ -58,0 +38,0 @@ }

@@ -5,3 +5,3 @@ /**

/// <reference types="node" />
import * as fs from 'node:fs';
import fs from 'node:fs';
export interface IReadResult {

@@ -15,3 +15,3 @@ bytesRead: number;

export declare function close(fd: number): Promise<void>;
export declare function open(path: fs.PathLike, mode?: string): Promise<number>;
export declare function open(path: fs.PathLike, mode: fs.Mode): Promise<number>;
export declare function read(fd: number, buffer: Uint8Array, offset: number, length: number, position: number): Promise<IReadResult>;

@@ -18,0 +18,0 @@ export declare function writeFile(path: fs.PathLike, data: Buffer | string): Promise<void>;

/**
* Module convert fs functions to promise based functions
*/
import * as fs from 'node:fs';
import fs from 'node:fs';
export const pathExists = fs.existsSync;

@@ -6,0 +6,0 @@ export const createReadStream = fs.createReadStream;

/// <reference types="node" />
import * as Stream from 'stream';
import { Readable } from 'node:stream';
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';

@@ -15,2 +15,2 @@ import * as core from './core.js';

*/
export declare function fromStream(stream: Stream.Readable, fileInfo?: core.IFileInfo): Promise<ReadStreamTokenizer>;
export declare function fromStream(stream: Readable, fileInfo?: core.IFileInfo): Promise<ReadStreamTokenizer>;
/// <reference types="node" />
import { AbstractTokenizer } from './AbstractTokenizer.js';
import * as Stream from 'node:stream';
import { Readable } from 'node:stream';
import { IFileInfo, IReadChunkOptions } from './types';
export declare class ReadStreamTokenizer extends AbstractTokenizer {
private streamReader;
constructor(stream: Stream.Readable, fileInfo?: IFileInfo);
constructor(stream: Readable, fileInfo?: IFileInfo);
/**

@@ -15,7 +15,7 @@ * Get file information, an HTTP-client may implement this doing a HEAD request

* Read buffer from tokenizer
* @param buffer - Target buffer to fill with data read from the tokenizer-stream
* @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
* @param options - Read behaviour options
* @returns Promise with number of bytes read
*/
readBuffer(buffer: Uint8Array, options?: IReadChunkOptions): Promise<number>;
readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
/**

@@ -22,0 +22,0 @@ * Peek (read ahead) buffer from tokenizer

@@ -18,38 +18,22 @@ import { AbstractTokenizer } from './AbstractTokenizer.js';

* Read buffer from tokenizer
* @param buffer - Target buffer to fill with data read from the tokenizer-stream
* @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
* @param options - Read behaviour options
* @returns Promise with number of bytes read
*/
async readBuffer(buffer, options) {
// const _offset = position ? position : this.position;
// debug(`readBuffer ${_offset}...${_offset + length - 1}`);
let offset = 0;
let length = buffer.length;
if (options) {
if (Number.isInteger(options.length)) {
length = options.length;
}
else {
length -= options.offset || 0;
}
if (options.position) {
const skipBytes = options.position - this.position;
if (skipBytes > 0) {
await this.ignore(skipBytes);
return this.readBuffer(buffer, options);
}
else if (skipBytes < 0) {
throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
}
}
if (options.offset) {
offset = options.offset;
}
async readBuffer(uint8Array, options) {
const normOptions = this.normalizeOptions(uint8Array, options);
const skipBytes = normOptions.position - this.position;
if (skipBytes > 0) {
await this.ignore(skipBytes);
return this.readBuffer(uint8Array, options);
}
if (length === 0) {
else if (skipBytes < 0) {
throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
}
if (normOptions.length === 0) {
return 0;
}
const bytesRead = await this.streamReader.read(buffer, offset, length);
const bytesRead = await this.streamReader.read(uint8Array, normOptions.offset, normOptions.length);
this.position += bytesRead;
if ((!options || !options.mayBeLess) && bytesRead < length) {
if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
throw new EndOfStreamError();

@@ -66,10 +50,10 @@ }

async peekBuffer(uint8Array, options) {
options = this.normalizeOptions(uint8Array, options);
const normOptions = this.normalizeOptions(uint8Array, options);
let bytesRead = 0;
if (options.position) {
const skipBytes = options.position - this.position;
if (normOptions.position) {
const skipBytes = normOptions.position - this.position;
if (skipBytes > 0) {
const skipBuffer = new Uint8Array(options.length + skipBytes);
bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: options.mayBeLess });
uint8Array.set(skipBuffer.subarray(skipBytes), options.offset);
const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
uint8Array.set(skipBuffer.subarray(skipBytes), normOptions.offset);
return bytesRead - skipBytes;

@@ -81,5 +65,5 @@ }

}
if (options.length > 0) {
if (normOptions.length > 0) {
try {
bytesRead = await this.streamReader.peek(uint8Array, options.offset, options.length);
bytesRead = await this.streamReader.peek(uint8Array, normOptions.offset, normOptions.length);
}

@@ -92,3 +76,3 @@ catch (err) {

}
if ((!options.mayBeLess) && bytesRead < options.length) {
if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
throw new EndOfStreamError();

@@ -95,0 +79,0 @@ }

{
"name": "strtok3",
"version": "7.0.0-alpha.3",
"version": "7.0.0-alpha.4",
"description": "A promise based streaming tokenizer",

@@ -20,5 +20,5 @@ "author": {

"eslint": "eslint lib test --ext .ts --ignore-pattern *.d.ts",
"fix": "eslint lib test --ext .ts --ignore-pattern *.d.ts --fix",
"lint-md": "remark -u preset-lint-recommended .",
"lint": "npm run lint-md && npm run eslint",
"fix": "eslint lib test --ext .ts --ignore-pattern *.d.ts --fix",
"test": "mocha",

@@ -41,3 +41,6 @@ "test-coverage": "c8 npm run test",

"exports": {
".": "./lib/index.js",
".": {
"node": "./lib/index.js",
"default": "./lib/core.js"
},
"./core": "./lib/core.js"

@@ -56,5 +59,5 @@ },

"@types/chai": "^4.2.21",
"@types/debug": "^4.1.6",
"@types/debug": "^4.1.7",
"@types/mocha": "^9.0.0",
"@types/node": "^16.4.6",
"@types/node": "^16.4.7",
"@typescript-eslint/eslint-plugin": "^2.34.0",

@@ -79,3 +82,4 @@ "@typescript-eslint/eslint-plugin-tslint": "^4.28.5",

"dependencies": {
"peek-readable": "^5.0.0-alpha.3"
"@tokenizer/token": "^0.3.0",
"peek-readable": "^5.0.0-alpha.4"
},

@@ -82,0 +86,0 @@ "keywords": [

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc