strtok3: comparing version 7.0.0-alpha.5 to 7.0.0-alpha.6
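This release migrates the published build from CommonJS to native ESM: require() calls become import statements, exports assignments become export declarations, relative imports gain explicit .js extensions (mandatory under ESM resolution), and Node builtins are referenced through the node: prefix. For consumers this is a breaking change, since the package can no longer be loaded with require(). A minimal sketch of usage against the new build follows; fromBuffer and readToken appear in the diff below, while UINT32_BE is an assumption drawn from the companion token-types package, which is not part of this diff.

// Minimal sketch, assuming strtok3 7.0.0-alpha.6 and token-types are installed.
import { fromBuffer } from 'strtok3';
import { UINT32_BE } from 'token-types'; // assumption: companion token package

const tokenizer = fromBuffer(new Uint8Array([0x00, 0x00, 0x00, 0x2a]));
const value = await tokenizer.readToken(UINT32_BE); // reads 4 bytes, big-endian
console.log(value); // 42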
--- AbstractTokenizer.js (7.0.0-alpha.5)
+++ AbstractTokenizer.js (7.0.0-alpha.6)
@@ -1,9 +0,7 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.AbstractTokenizer = void 0;
-const peek_readable_1 = require("peek-readable");
+import { EndOfStreamError } from 'peek-readable';
+import { Buffer } from 'node:buffer';
 /**
  * Core tokenizer
  */
-class AbstractTokenizer {
+export class AbstractTokenizer {
     constructor(fileInfo) {
@@ -27,3 +25,3 @@ /**
         if (len < token.len)
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         return token.get(uint8Array, 0);
@@ -41,3 +39,3 @@ }
         if (len < token.len)
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         return token.get(uint8Array, 0);
@@ -53,3 +51,3 @@ }
         if (len < token.len)
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         return token.get(this.numBuffer, 0);
@@ -65,3 +63,3 @@ }
         if (len < token.len)
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         return token.get(this.numBuffer, 0);
@@ -108,2 +106,1 @@ }
 }
-exports.AbstractTokenizer = AbstractTokenizer;
--- BufferTokenizer.d.ts (7.0.0-alpha.5)
+++ BufferTokenizer.d.ts (7.0.0-alpha.6)
-import { IFileInfo, IReadChunkOptions } from './types';
-import { AbstractTokenizer } from './AbstractTokenizer';
+import { IFileInfo, IReadChunkOptions } from './types.js';
+import { AbstractTokenizer } from './AbstractTokenizer.js';
 export declare class BufferTokenizer extends AbstractTokenizer {
@@ -4,0 +4,0 @@ private uint8Array;
--- BufferTokenizer.js (7.0.0-alpha.5)
+++ BufferTokenizer.js (7.0.0-alpha.6)
@@ -1,7 +0,4 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BufferTokenizer = void 0;
-const peek_readable_1 = require("peek-readable");
-const AbstractTokenizer_1 = require("./AbstractTokenizer");
-class BufferTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
+import { EndOfStreamError } from 'peek-readable';
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+export class BufferTokenizer extends AbstractTokenizer {
     /**
@@ -44,3 +41,3 @@ * Construct BufferTokenizer
         if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         }
@@ -56,2 +53,1 @@ else {
 }
-exports.BufferTokenizer = BufferTokenizer;
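The hunk above shows the mayBeLess contract: when fewer bytes remain than requested, the read throws EndOfStreamError unless the caller opted in with mayBeLess, in which case a short read is returned. A sketch of both paths, using a hypothetical two-byte input:

import { fromBuffer, EndOfStreamError } from 'strtok3';

const tokenizer = fromBuffer(new Uint8Array([0x01, 0x02])); // only 2 bytes available
const target = new Uint8Array(4); // but 4 bytes requested

// Opting in: resolves with the number of bytes actually read.
const bytesRead = await tokenizer.peekBuffer(target, { mayBeLess: true });
console.log(bytesRead); // 2

// Default: requesting more bytes than remain rejects with EndOfStreamError.
try {
  await tokenizer.readBuffer(target);
} catch (err) {
  if (err instanceof EndOfStreamError) console.log('end of stream');
  else throw err;
}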
--- core.d.ts (7.0.0-alpha.5)
+++ core.d.ts (7.0.0-alpha.6)
 /// <reference types="node" />
-import { ReadStreamTokenizer } from './ReadStreamTokenizer';
-import { Readable } from 'stream';
-import { BufferTokenizer } from './BufferTokenizer';
-import { IFileInfo } from './types';
+import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
+import { Readable } from 'node:stream';
+import { BufferTokenizer } from './BufferTokenizer.js';
+import { IFileInfo } from './types.js';
 export { EndOfStreamError } from 'peek-readable';
-export { ITokenizer, IFileInfo } from './types';
+export { ITokenizer, IFileInfo } from './types.js';
 export { IToken, IGetToken } from '@tokenizer/token';
@@ -9,0 +9,0 @@ /**
--- core.js (7.0.0-alpha.5)
+++ core.js (7.0.0-alpha.6)
@@ -1,8 +0,4 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fromBuffer = exports.fromStream = exports.EndOfStreamError = void 0;
-const ReadStreamTokenizer_1 = require("./ReadStreamTokenizer");
-const BufferTokenizer_1 = require("./BufferTokenizer");
-var peek_readable_1 = require("peek-readable");
-Object.defineProperty(exports, "EndOfStreamError", { enumerable: true, get: function () { return peek_readable_1.EndOfStreamError; } });
+import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
+import { BufferTokenizer } from './BufferTokenizer.js';
+export { EndOfStreamError } from 'peek-readable';
 /**
@@ -15,7 +11,6 @@ * Construct ReadStreamTokenizer from given Stream.
  */
-function fromStream(stream, fileInfo) {
+export function fromStream(stream, fileInfo) {
     fileInfo = fileInfo ? fileInfo : {};
-    return new ReadStreamTokenizer_1.ReadStreamTokenizer(stream, fileInfo);
+    return new ReadStreamTokenizer(stream, fileInfo);
 }
-exports.fromStream = fromStream;
 /**
@@ -27,5 +22,4 @@ * Construct ReadStreamTokenizer from given Buffer.
  */
-function fromBuffer(uint8Array, fileInfo) {
-    return new BufferTokenizer_1.BufferTokenizer(uint8Array, fileInfo);
+export function fromBuffer(uint8Array, fileInfo) {
+    return new BufferTokenizer(uint8Array, fileInfo);
 }
-exports.fromBuffer = fromBuffer;
--- FileTokenizer.d.ts (7.0.0-alpha.5)
+++ FileTokenizer.d.ts (7.0.0-alpha.6)
@@ -1,3 +0,3 @@
-import { AbstractTokenizer } from './AbstractTokenizer';
-import { IFileInfo, IReadChunkOptions } from './types';
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { IFileInfo, IReadChunkOptions } from './types.js';
 export declare class FileTokenizer extends AbstractTokenizer {
@@ -4,0 +4,0 @@ private fd;
--- FileTokenizer.js (7.0.0-alpha.5)
+++ FileTokenizer.js (7.0.0-alpha.6)
@@ -1,8 +0,5 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fromFile = exports.FileTokenizer = void 0;
-const AbstractTokenizer_1 = require("./AbstractTokenizer");
-const peek_readable_1 = require("peek-readable");
-const fs = require("./FsPromise");
-class FileTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { EndOfStreamError } from 'peek-readable';
+import * as fs from './FsPromise.js';
+export class FileTokenizer extends AbstractTokenizer {
     constructor(fd, fileInfo) {
@@ -24,3 +21,3 @@ super(fileInfo);
         if (res.bytesRead < normOptions.length && (!options || !options.mayBeLess)) {
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         }
@@ -39,3 +36,3 @@ return res.bytesRead;
         if ((!normOptions.mayBeLess) && res.bytesRead < normOptions.length) {
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         }
@@ -48,4 +45,3 @@ return res.bytesRead;
 }
-exports.FileTokenizer = FileTokenizer;
-async function fromFile(sourceFilePath) {
+export async function fromFile(sourceFilePath) {
     const stat = await fs.stat(sourceFilePath);
@@ -58,2 +54,1 @@ if (!stat.isFile) {
 }
-exports.fromFile = fromFile;
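fromFile (above) opens the file itself, so the returned tokenizer owns the file descriptor and should be closed when done. A brief sketch; 'example.bin' is a placeholder path:

import { fromFile } from 'strtok3';
import { UINT8 } from 'token-types'; // assumption: companion token package

const tokenizer = await fromFile('example.bin'); // placeholder path
try {
  console.log(await tokenizer.readToken(UINT8)); // first byte of the file
} finally {
  await tokenizer.close(); // releases the descriptor opened by fromFile
}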
--- FsPromise.d.ts (7.0.0-alpha.5)
+++ FsPromise.d.ts (7.0.0-alpha.6)
@@ -5,3 +5,3 @@ /**
 /// <reference types="node" />
-import * as fs from 'fs';
+import fs from 'node:fs';
 export interface IReadResult {
@@ -8,0 +8,0 @@ bytesRead: number;
--- FsPromise.js (7.0.0-alpha.5)
+++ FsPromise.js (7.0.0-alpha.6)
@@ -1,11 +0,8 @@
-"use strict";
 /**
  * Module convert fs functions to promise based functions
  */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.readFile = exports.writeFileSync = exports.writeFile = exports.read = exports.open = exports.close = exports.stat = exports.createReadStream = exports.pathExists = void 0;
-const fs = require("fs");
-exports.pathExists = fs.existsSync;
-exports.createReadStream = fs.createReadStream;
-async function stat(path) {
+import fs from 'node:fs';
+export const pathExists = fs.existsSync;
+export const createReadStream = fs.createReadStream;
+export async function stat(path) {
     return new Promise((resolve, reject) => {
@@ -20,4 +17,3 @@ fs.stat(path, (err, stats) => {
 }
-exports.stat = stat;
-async function close(fd) {
+export async function close(fd) {
     return new Promise((resolve, reject) => {
@@ -32,4 +28,3 @@ fs.close(fd, err => {
 }
-exports.close = close;
-async function open(path, mode) {
+export async function open(path, mode) {
     return new Promise((resolve, reject) => {
@@ -44,4 +39,3 @@ fs.open(path, mode, (err, fd) => {
 }
-exports.open = open;
-async function read(fd, buffer, offset, length, position) {
+export async function read(fd, buffer, offset, length, position) {
     return new Promise((resolve, reject) => {
@@ -56,4 +50,3 @@ fs.read(fd, buffer, offset, length, position, (err, bytesRead, _buffer) => {
 }
-exports.read = read;
-async function writeFile(path, data) {
+export async function writeFile(path, data) {
     return new Promise((resolve, reject) => {
@@ -68,8 +61,6 @@ fs.writeFile(path, data, err => {
 }
-exports.writeFile = writeFile;
-function writeFileSync(path, data) {
+export function writeFileSync(path, data) {
     fs.writeFileSync(path, data);
 }
-exports.writeFileSync = writeFileSync;
-async function readFile(path) {
+export async function readFile(path) {
     return new Promise((resolve, reject) => {
@@ -84,2 +75,1 @@ fs.readFile(path, (err, buffer) => {
 }
-exports.readFile = readFile;
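The hunks above truncate the Promise bodies, but the pattern is uniform across the module: each callback-style fs function is wrapped so that the callback's error rejects and its result resolves. A reconstruction of the stat wrapper as one example (the exact bodies are elided from this diff):

import fs from 'node:fs';

// Wrapper pattern used throughout FsPromise.js: reject on error, else resolve.
export async function stat(path) {
  return new Promise((resolve, reject) => {
    fs.stat(path, (err, stats) => {
      if (err) reject(err);
      else resolve(stats);
    });
  });
}

Node's built-in fs/promises offers a comparable surface; keeping hand-rolled wrappers lets the module shape its own results, e.g. the IReadResult with bytesRead declared in FsPromise.d.ts above.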
--- index.d.ts (7.0.0-alpha.5)
+++ index.d.ts (7.0.0-alpha.6)
 /// <reference types="node" />
-import { Readable } from 'stream';
-import { ReadStreamTokenizer } from './ReadStreamTokenizer';
-import * as core from './core';
-export { fromFile } from './FileTokenizer';
-export { ITokenizer, EndOfStreamError, fromBuffer, IFileInfo } from './core';
+import { Readable } from 'node:stream';
+import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
+import * as core from './core.js';
+export { fromFile } from './FileTokenizer.js';
+export { ITokenizer, EndOfStreamError, fromBuffer, IFileInfo } from './core.js';
 export { IToken, IGetToken } from '@tokenizer/token';
@@ -8,0 +8,0 @@ /**
--- index.js (7.0.0-alpha.5)
+++ index.js (7.0.0-alpha.6)
@@ -1,11 +0,5 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fromStream = exports.fromBuffer = exports.EndOfStreamError = exports.fromFile = void 0;
-const fs = require("./FsPromise");
-const core = require("./core");
-var FileTokenizer_1 = require("./FileTokenizer");
-Object.defineProperty(exports, "fromFile", { enumerable: true, get: function () { return FileTokenizer_1.fromFile; } });
-var core_1 = require("./core");
-Object.defineProperty(exports, "EndOfStreamError", { enumerable: true, get: function () { return core_1.EndOfStreamError; } });
-Object.defineProperty(exports, "fromBuffer", { enumerable: true, get: function () { return core_1.fromBuffer; } });
+import * as fs from './FsPromise.js';
+import * as core from './core.js';
+export { fromFile } from './FileTokenizer.js';
+export { EndOfStreamError, fromBuffer } from './core.js';
 /**
@@ -18,3 +12,3 @@ * Construct ReadStreamTokenizer from given Stream.
  */
-async function fromStream(stream, fileInfo) {
+export async function fromStream(stream, fileInfo) {
     fileInfo = fileInfo ? fileInfo : {};
@@ -28,2 +22,1 @@ if (stream.path) {
 }
-exports.fromStream = fromStream;
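Unlike the core fromStream, which constructs the tokenizer synchronously, the Node-specific variant above is async: when the stream exposes a path (the `if (stream.path)` hunk), it stats the underlying file so the tokenizer's fileInfo can carry the file size. A sketch; 'example.bin' is a placeholder path:

import { createReadStream } from 'node:fs';
import { fromStream } from 'strtok3';

const tokenizer = await fromStream(createReadStream('example.bin')); // placeholder path
console.log(tokenizer.fileInfo.size); // filled in via fs.stat when the stream has a path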
--- ReadStreamTokenizer.d.ts (7.0.0-alpha.5)
+++ ReadStreamTokenizer.d.ts (7.0.0-alpha.6)
 /// <reference types="node" />
-import { AbstractTokenizer } from './AbstractTokenizer';
-import { Readable } from 'stream';
-import { IFileInfo, IReadChunkOptions } from './types';
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { Readable } from 'node:stream';
+import { IFileInfo, IReadChunkOptions } from './types.js';
 export declare class ReadStreamTokenizer extends AbstractTokenizer {
@@ -6,0 +6,0 @@ private streamReader;
--- ReadStreamTokenizer.js (7.0.0-alpha.5)
+++ ReadStreamTokenizer.js (7.0.0-alpha.6)
@@ -1,11 +0,8 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ReadStreamTokenizer = void 0;
-const AbstractTokenizer_1 = require("./AbstractTokenizer");
-const peek_readable_1 = require("peek-readable");
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { EndOfStreamError, StreamReader } from 'peek-readable';
 const maxBufferSize = 256000;
-class ReadStreamTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
+export class ReadStreamTokenizer extends AbstractTokenizer {
     constructor(stream, fileInfo) {
         super(fileInfo);
-        this.streamReader = new peek_readable_1.StreamReader(stream);
+        this.streamReader = new StreamReader(stream);
     }
@@ -41,3 +38,3 @@ /**
         if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         }
@@ -72,3 +69,3 @@ return bytesRead;
         catch (err) {
-            if (options && options.mayBeLess && err instanceof peek_readable_1.EndOfStreamError) {
+            if (options && options.mayBeLess && err instanceof EndOfStreamError) {
                 return 0;
@@ -79,3 +76,3 @@ }
         if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
-            throw new peek_readable_1.EndOfStreamError();
+            throw new EndOfStreamError();
         }
@@ -101,2 +98,1 @@ }
 }
-exports.ReadStreamTokenizer = ReadStreamTokenizer;
--- types.js (7.0.0-alpha.5)
+++ types.js (7.0.0-alpha.6)
@@ -1,2 +0,1 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
+export {};
--- package.json (7.0.0-alpha.5)
+++ package.json (7.0.0-alpha.6)
 {
   "name": "strtok3",
-  "version": "7.0.0-alpha.5",
+  "version": "7.0.0-alpha.6",
   "description": "A promise based streaming tokenizer",
@@ -5,0 +5,0 @@ "author": {
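A caveat, stated as an assumption since this hunk only shows the version bump: for Node to interpret the bare .js files above as ESM, the manifest must also declare "type": "module" (or the files would need the .mjs extension), so that switch presumably accompanies this release.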
Alerts reported for this package:

- License Policy Violation (License): This package is not allowed per your license policy. Review the package's license to ensure compliance. Found 1 instance in 1 package.
- Deprecated (Maintenance): The maintainer of the package marked it as deprecated. This could indicate that a single version should not be used, or that the package is no longer maintained and any new vulnerabilities will not be fixed. Found 1 instance in 1 package.
- Filesystem access (Supply chain risk): Accesses the file system, and could potentially read sensitive data. Found 1 instance in 1 package.