strtok3 - npm Package Compare versions

Comparing version 7.0.0-alpha.4 to 7.0.0-alpha.5

The bulk of the diff is in the compiled lib/ output: alpha.4 shipped native ES modules (import/export, node:-prefixed built-ins, .js import extensions), while alpha.5 emits CommonJS (require, exports, plain specifiers). The remaining changes are a JSDoc typo fix ("correspnding" → "corresponding") and devDependency bumps in package.json. In the hunks below, "-" lines are alpha.4 and "+" lines are alpha.5.

lib/AbstractTokenizer.d.ts

```diff
@@ -0,0 +0,0 @@ import { ITokenizer, IFileInfo, IReadChunkOptions } from './types';
```

lib/AbstractTokenizer.js

```diff
@@ -1,6 +0,9 @@
-import { EndOfStreamError } from 'peek-readable';
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AbstractTokenizer = void 0;
+const peek_readable_1 = require("peek-readable");
 /**
  * Core tokenizer
  */
-export class AbstractTokenizer {
+class AbstractTokenizer {
     constructor(fileInfo) {
@@ -24,3 +27,3 @@ /**
         if (len < token.len)
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         return token.get(uint8Array, 0);
@@ -38,3 +41,3 @@ }
         if (len < token.len)
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         return token.get(uint8Array, 0);
@@ -50,3 +53,3 @@ }
         if (len < token.len)
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         return token.get(this.numBuffer, 0);
@@ -62,3 +65,3 @@ }
         if (len < token.len)
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         return token.get(this.numBuffer, 0);
@@ -105,1 +108,2 @@ }
 }
+exports.AbstractTokenizer = AbstractTokenizer;
```
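The hunks above show the same TypeScript source compiled to two module systems: each ESM named import becomes a `require` binding with a `_1` suffix, and `export class` becomes a plain class plus an `exports` assignment. A minimal sketch of the pattern, assuming it is driven by the `module` compiler option (the tsconfig itself is not part of this diff, and the class below is a hypothetical reduction):

```ts
// Shared TypeScript source; both alpha builds would compile from code like this.
import { EndOfStreamError } from 'peek-readable';

export class Tokenizer {
  protected assertLen(len: number, expected: number): void {
    if (len < expected) {
      // "module": "es2020"-style output (alpha.4) keeps this statement verbatim;
      // "module": "commonjs" (alpha.5) rewrites it through the require binding:
      //   throw new peek_readable_1.EndOfStreamError();
      throw new EndOfStreamError();
    }
  }
}
```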
lib/BufferTokenizer.d.ts

```diff
 import { IFileInfo, IReadChunkOptions } from './types';
-import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { AbstractTokenizer } from './AbstractTokenizer';
 export declare class BufferTokenizer extends AbstractTokenizer {
@@ -4,0 +4,0 @@ private uint8Array;
```

lib/BufferTokenizer.js

```diff
@@ -1,4 +0,7 @@
-import { EndOfStreamError } from 'peek-readable';
-import { AbstractTokenizer } from './AbstractTokenizer.js';
-export class BufferTokenizer extends AbstractTokenizer {
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BufferTokenizer = void 0;
+const peek_readable_1 = require("peek-readable");
+const AbstractTokenizer_1 = require("./AbstractTokenizer");
+class BufferTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
 /**
@@ -41,3 +44,3 @@ * Construct BufferTokenizer
         if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         }
@@ -53,1 +56,2 @@ else {
 }
+exports.BufferTokenizer = BufferTokenizer;
```
lib/core.d.ts

```diff
 /// <reference types="node" />
-import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
-import { Readable } from 'node:stream';
-import { BufferTokenizer } from './BufferTokenizer.js';
-import { IFileInfo } from './types.js';
+import { ReadStreamTokenizer } from './ReadStreamTokenizer';
+import { Readable } from 'stream';
+import { BufferTokenizer } from './BufferTokenizer';
+import { IFileInfo } from './types';
 export { EndOfStreamError } from 'peek-readable';
-export { ITokenizer, IFileInfo } from './types.js';
+export { ITokenizer, IFileInfo } from './types';
 export { IToken, IGetToken } from '@tokenizer/token';
@@ -13,3 +13,3 @@ /**
  * @param stream - Read from Node.js Stream.Readable
- * @param fileInfo - Pass the file information, like size and MIME-type of the correspnding stream.
+ * @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
  * @returns ReadStreamTokenizer
@@ -16,0 +16,0 @@ */
```
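core.d.ts re-exports the token contracts from @tokenizer/token. For orientation, a token only needs `len` and `get`; this example token is hypothetical, but the interface shape is the one re-exported here:

```ts
import type { IGetToken } from '@tokenizer/token';

// Hypothetical 3-byte ASCII tag token, implementing the IGetToken contract
// that strtok3's readToken/peekToken accept.
const Tag3: IGetToken<string> = {
  len: 3,
  get: (buf: Uint8Array, off: number): string =>
    String.fromCharCode(buf[off], buf[off + 1], buf[off + 2]),
};
```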

lib/core.js

```diff
@@ -1,4 +0,8 @@
-import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
-import { BufferTokenizer } from './BufferTokenizer.js';
-export { EndOfStreamError } from 'peek-readable';
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fromBuffer = exports.fromStream = exports.EndOfStreamError = void 0;
+const ReadStreamTokenizer_1 = require("./ReadStreamTokenizer");
+const BufferTokenizer_1 = require("./BufferTokenizer");
+var peek_readable_1 = require("peek-readable");
+Object.defineProperty(exports, "EndOfStreamError", { enumerable: true, get: function () { return peek_readable_1.EndOfStreamError; } });
 /**
@@ -8,9 +12,10 @@ * Construct ReadStreamTokenizer from given Stream.
  * @param stream - Read from Node.js Stream.Readable
- * @param fileInfo - Pass the file information, like size and MIME-type of the correspnding stream.
+ * @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
  * @returns ReadStreamTokenizer
  */
-export function fromStream(stream, fileInfo) {
+function fromStream(stream, fileInfo) {
     fileInfo = fileInfo ? fileInfo : {};
-    return new ReadStreamTokenizer(stream, fileInfo);
+    return new ReadStreamTokenizer_1.ReadStreamTokenizer(stream, fileInfo);
 }
+exports.fromStream = fromStream;
 /**
@@ -22,4 +27,5 @@ * Construct ReadStreamTokenizer from given Buffer.
  */
-export function fromBuffer(uint8Array, fileInfo) {
-    return new BufferTokenizer(uint8Array, fileInfo);
+function fromBuffer(uint8Array, fileInfo) {
+    return new BufferTokenizer_1.BufferTokenizer(uint8Array, fileInfo);
 }
+exports.fromBuffer = fromBuffer;
```
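Whichever module format is emitted, the public surface of core is unchanged: `fromStream` and `fromBuffer` both return a tokenizer. A hedged usage sketch; the deep-import path is an assumption based on the lib/ layout shown here, and token definitions come from the token-types package listed in this diff's devDependencies:

```ts
import { fromBuffer } from 'strtok3/lib/core';
import * as Token from 'token-types';

async function readFirstWord(): Promise<number> {
  const tokenizer = fromBuffer(new Uint8Array([0x05, 0x00, 0x01, 0x00]));
  return tokenizer.readToken(Token.UINT16_LE); // consumes 2 bytes => 5
}
```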

lib/FileTokenizer.d.ts

```diff
@@ -1,2 +0,2 @@
-import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { AbstractTokenizer } from './AbstractTokenizer';
 import { IFileInfo, IReadChunkOptions } from './types';
@@ -3,0 +3,0 @@ export declare class FileTokenizer extends AbstractTokenizer {
```

lib/FileTokenizer.js

```diff
@@ -1,5 +0,8 @@
-import { AbstractTokenizer } from './AbstractTokenizer.js';
-import { EndOfStreamError } from 'peek-readable';
-import * as fs from './FsPromise.js';
-export class FileTokenizer extends AbstractTokenizer {
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fromFile = exports.FileTokenizer = void 0;
+const AbstractTokenizer_1 = require("./AbstractTokenizer");
+const peek_readable_1 = require("peek-readable");
+const fs = require("./FsPromise");
+class FileTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
     constructor(fd, fileInfo) {
@@ -21,3 +24,3 @@ super(fileInfo);
         if (res.bytesRead < normOptions.length && (!options || !options.mayBeLess)) {
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         }
@@ -36,3 +39,3 @@ return res.bytesRead;
         if ((!normOptions.mayBeLess) && res.bytesRead < normOptions.length) {
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         }
@@ -45,3 +48,4 @@ return res.bytesRead;
 }
-export async function fromFile(sourceFilePath) {
+exports.FileTokenizer = FileTokenizer;
+async function fromFile(sourceFilePath) {
     const stat = await fs.stat(sourceFilePath);
@@ -54,1 +58,2 @@ if (!stat.isFile) {
 }
+exports.fromFile = fromFile;
```
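As the hunks show, `fromFile` stats the source path before constructing a `FileTokenizer` around a file descriptor. A short hedged sketch of typical use (the cleanup pattern is illustrative, not taken from this diff):

```ts
import { fromFile } from 'strtok3';

async function probe(path: string): Promise<void> {
  const tokenizer = await fromFile(path); // fs.stat + fs.open under the hood
  try {
    // ... readToken / peekToken calls go here ...
  } finally {
    await tokenizer.close(); // FileTokenizer owns a file descriptor
  }
}
```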

lib/FsPromise.d.ts

```diff
@@ -5,3 +5,3 @@ /**
 /// <reference types="node" />
-import fs from 'node:fs';
+import * as fs from 'fs';
 export interface IReadResult {
@@ -8,0 +8,0 @@ bytesRead: number;
```
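Two things change in this one import: the node: specifier prefix is dropped, and the default import becomes a namespace import. The latter matters for CommonJS output, since the fs module has no real default export. A minimal sketch of the distinction; the compiler flags named in the comments are standard tsc options, not part of this diff:

```ts
// Default import: only compiles with esModuleInterop / allowSyntheticDefaultImports,
// because the CommonJS 'fs' module has no default export of its own.
import fsDefault from 'node:fs';

// Namespace import: always valid, binds the whole module object.
import * as fsNamespace from 'fs';

// 'node:fs' and 'fs' resolve to the same module at runtime.
console.log(fsDefault.existsSync === fsNamespace.existsSync); // true
```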

lib/FsPromise.js

```diff
@@ -0,8 +1,11 @@
+"use strict";
 /**
  * Module convert fs functions to promise based functions
  */
-import fs from 'node:fs';
-export const pathExists = fs.existsSync;
-export const createReadStream = fs.createReadStream;
-export async function stat(path) {
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.readFile = exports.writeFileSync = exports.writeFile = exports.read = exports.open = exports.close = exports.stat = exports.createReadStream = exports.pathExists = void 0;
+const fs = require("fs");
+exports.pathExists = fs.existsSync;
+exports.createReadStream = fs.createReadStream;
+async function stat(path) {
     return new Promise((resolve, reject) => {
@@ -17,3 +20,4 @@ fs.stat(path, (err, stats) => {
 }
-export async function close(fd) {
+exports.stat = stat;
+async function close(fd) {
     return new Promise((resolve, reject) => {
@@ -28,3 +32,4 @@ fs.close(fd, err => {
 }
-export async function open(path, mode) {
+exports.close = close;
+async function open(path, mode) {
     return new Promise((resolve, reject) => {
@@ -39,3 +44,4 @@ fs.open(path, mode, (err, fd) => {
 }
-export async function read(fd, buffer, offset, length, position) {
+exports.open = open;
+async function read(fd, buffer, offset, length, position) {
     return new Promise((resolve, reject) => {
@@ -50,3 +56,4 @@ fs.read(fd, buffer, offset, length, position, (err, bytesRead, _buffer) => {
 }
-export async function writeFile(path, data) {
+exports.read = read;
+async function writeFile(path, data) {
     return new Promise((resolve, reject) => {
@@ -61,6 +68,8 @@ fs.writeFile(path, data, err => {
 }
-export function writeFileSync(path, data) {
+exports.writeFile = writeFile;
+function writeFileSync(path, data) {
     fs.writeFileSync(path, data);
 }
-export async function readFile(path) {
+exports.writeFileSync = writeFileSync;
+async function readFile(path) {
     return new Promise((resolve, reject) => {
@@ -75,1 +84,2 @@ fs.readFile(path, (err, buffer) => {
 }
+exports.readFile = readFile;
```
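FsPromise wraps Node's callback-style fs functions in promises by hand, per the file's own comment ("Module convert fs functions to promise based functions"). The wrapping pattern, sketched for `stat` as it appears in the hunks; Node's built-in fs/promises would be the modern alternative:

```ts
import * as fs from 'fs';

// Hand-rolled promisification, mirroring FsPromise's stat/open/read/close wrappers:
// resolve with the callback's result, reject with its error.
function stat(path: string): Promise<fs.Stats> {
  return new Promise((resolve, reject) => {
    fs.stat(path, (err, stats) => (err ? reject(err) : resolve(stats)));
  });
}
```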
lib/index.d.ts

```diff
 /// <reference types="node" />
-import { Readable } from 'node:stream';
-import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
-import * as core from './core.js';
-export { fromFile } from './FileTokenizer.js';
-export { ITokenizer, EndOfStreamError, fromBuffer, IFileInfo } from './core.js';
+import { Readable } from 'stream';
+import { ReadStreamTokenizer } from './ReadStreamTokenizer';
+import * as core from './core';
+export { fromFile } from './FileTokenizer';
+export { ITokenizer, EndOfStreamError, fromBuffer, IFileInfo } from './core';
 export { IToken, IGetToken } from '@tokenizer/token';
@@ -8,0 +8,0 @@ /**
```

lib/index.js

```diff
@@ -1,5 +0,11 @@
-import * as fs from './FsPromise.js';
-import * as core from './core.js';
-export { fromFile } from './FileTokenizer.js';
-export { EndOfStreamError, fromBuffer } from './core.js';
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fromStream = exports.fromBuffer = exports.EndOfStreamError = exports.fromFile = void 0;
+const fs = require("./FsPromise");
+const core = require("./core");
+var FileTokenizer_1 = require("./FileTokenizer");
+Object.defineProperty(exports, "fromFile", { enumerable: true, get: function () { return FileTokenizer_1.fromFile; } });
+var core_1 = require("./core");
+Object.defineProperty(exports, "EndOfStreamError", { enumerable: true, get: function () { return core_1.EndOfStreamError; } });
+Object.defineProperty(exports, "fromBuffer", { enumerable: true, get: function () { return core_1.fromBuffer; } });
 /**
@@ -12,3 +18,3 @@ * Construct ReadStreamTokenizer from given Stream.
  */
-export async function fromStream(stream, fileInfo) {
+async function fromStream(stream, fileInfo) {
     fileInfo = fileInfo ? fileInfo : {};
@@ -22,1 +28,2 @@ if (stream.path) {
 }
+exports.fromStream = fromStream;
```
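index.js layers a Node-specific convenience over core.fromStream: when the stream has a `path` (as in the `if (stream.path)` hunk context, e.g. a stream from fs.createReadStream), the file is stat-ed via FsPromise so the tokenizer's file info can be filled in, which is why this `fromStream` is async while the core one is not. A hedged usage sketch:

```ts
import { createReadStream } from 'fs';
import { fromStream } from 'strtok3';

async function sizeOf(path: string): Promise<number | undefined> {
  const tokenizer = await fromStream(createReadStream(path));
  return tokenizer.fileInfo.size; // populated via fs.stat because stream.path is set
}
```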
lib/ReadStreamTokenizer.d.ts

```diff
 /// <reference types="node" />
-import { AbstractTokenizer } from './AbstractTokenizer.js';
-import { Readable } from 'node:stream';
+import { AbstractTokenizer } from './AbstractTokenizer';
+import { Readable } from 'stream';
 import { IFileInfo, IReadChunkOptions } from './types';
@@ -5,0 +5,0 @@ export declare class ReadStreamTokenizer extends AbstractTokenizer {
```

lib/ReadStreamTokenizer.js

```diff
@@ -1,8 +0,11 @@
-import { AbstractTokenizer } from './AbstractTokenizer.js';
-import { EndOfStreamError, StreamReader } from 'peek-readable';
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ReadStreamTokenizer = void 0;
+const AbstractTokenizer_1 = require("./AbstractTokenizer");
+const peek_readable_1 = require("peek-readable");
 const maxBufferSize = 256000;
-export class ReadStreamTokenizer extends AbstractTokenizer {
+class ReadStreamTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
     constructor(stream, fileInfo) {
         super(fileInfo);
-        this.streamReader = new StreamReader(stream);
+        this.streamReader = new peek_readable_1.StreamReader(stream);
     }
@@ -38,3 +41,3 @@ /**
         if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         }
@@ -69,3 +72,3 @@ return bytesRead;
         catch (err) {
-            if (options && options.mayBeLess && err instanceof EndOfStreamError) {
+            if (options && options.mayBeLess && err instanceof peek_readable_1.EndOfStreamError) {
                 return 0;
@@ -76,3 +79,3 @@ }
         if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
-            throw new EndOfStreamError();
+            throw new peek_readable_1.EndOfStreamError();
         }
@@ -98,1 +101,2 @@ }
 }
+exports.ReadStreamTokenizer = ReadStreamTokenizer;
```
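Several of these hunks touch the `mayBeLess` handling: with `mayBeLess: true`, a read or peek that hits the end of the input returns the short byte count (or 0, in the catch path above) instead of throwing EndOfStreamError. A hedged sketch of the option in use; buffer sizes are arbitrary and the deep-import path is assumed from the lib/ layout:

```ts
import { fromBuffer } from 'strtok3/lib/core';

async function demoMayBeLess(): Promise<void> {
  const tokenizer = fromBuffer(new Uint8Array([0x01, 0x02])); // only 2 bytes available
  const target = new Uint8Array(16);
  // Without mayBeLess this would throw EndOfStreamError;
  // with it, the short byte count comes back instead.
  const bytesRead = await tokenizer.peekBuffer(target, { length: 16, mayBeLess: true });
  console.log(bytesRead); // 2
}
```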

lib/types.d.ts

```diff
@@ -0,0 +0,0 @@ /// <reference types="node" />
```

lib/types.js

```diff
@@ -1,1 +0,2 @@
-export {};
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
```
package.json

```diff
 {
   "name": "strtok3",
-  "version": "7.0.0-alpha.4",
+  "version": "7.0.0-alpha.5",
   "description": "A promise based streaming tokenizer",
@@ -55,11 +55,10 @@ "author": {
   "devDependencies": {
     "@tokenizer/token": "^0.3.0",
-    "@types/chai": "^4.2.21",
+    "@types/chai": "^4.3.0",
     "@types/debug": "^4.1.7",
     "@types/mocha": "^9.0.0",
-    "@types/node": "^16.4.7",
+    "@types/node": "^16.11.12",
     "@typescript-eslint/eslint-plugin": "^2.34.0",
-    "@typescript-eslint/eslint-plugin-tslint": "^4.28.5",
+    "@typescript-eslint/eslint-plugin-tslint": "^5.6.0",
     "@typescript-eslint/parser": "^2.34.0",
-    "c8": "^7.8.0",
+    "c8": "^7.10.0",
     "chai": "^4.3.4",
@@ -69,11 +68,11 @@ "coveralls": "^3.1.1",
     "eslint": "^6.8.0",
-    "eslint-plugin-import": "^2.23.4",
+    "eslint-plugin-import": "^2.25.3",
     "eslint-plugin-prefer-arrow": "^1.2.3",
-    "mocha": "^9.0.3",
-    "remark-cli": "^9.0.0",
-    "remark-preset-lint-recommended": "^5.0.0",
-    "token-types": "^4.1.0",
-    "ts-node": "^10.1.0",
+    "mocha": "^9.1.3",
+    "remark-cli": "^10.0.1",
+    "remark-preset-lint-recommended": "^6.1.2",
+    "token-types": "^4.1.1",
+    "ts-node": "^10.4.0",
     "tslint": "^6.1.3",
-    "typescript": "^4.3.5"
+    "typescript": "^4.5.3"
   },
@@ -80,0 +79,0 @@ "dependencies": {
```

README.md

```diff
@@ -0,0 +0,0 @@ ![Node.js CI](https://github.com/Borewit/strtok3/workflows/Node.js%20CI/badge.svg)
```

Sorry, the diff of this file is not supported yet.
