strtok3 - npm Package Compare versions

Comparing version 5.0.0 to 5.0.1


lib/AbstractTokenizer.js

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
- const then_read_stream_1 = require("then-read-stream");
+ const peek_readable_1 = require("peek-readable");
/**

@@ -27,3 +27,3 @@ * Core tokenizer
if (!maybeless && len < token.len)
- throw new then_read_stream_1.EndOfStreamError();
+ throw new peek_readable_1.EndOfStreamError();
return token.get(buffer, 0);

@@ -42,3 +42,3 @@ }
if (!maybeless && len < token.len)
- throw new then_read_stream_1.EndOfStreamError();
+ throw new peek_readable_1.EndOfStreamError();
return token.get(buffer, 0);

@@ -54,3 +54,3 @@ }
if (len < token.len)
- throw new then_read_stream_1.EndOfStreamError();
+ throw new peek_readable_1.EndOfStreamError();
return token.get(this.numBuffer, 0);

@@ -66,3 +66,3 @@ }
if (len < token.len)
- throw new then_read_stream_1.EndOfStreamError();
+ throw new peek_readable_1.EndOfStreamError();
return token.get(this.numBuffer, 0);

@@ -69,0 +69,0 @@ }

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const then_read_stream_1 = require("then-read-stream");
const peek_readable_1 = require("peek-readable");
class BufferTokenizer {

@@ -47,3 +47,3 @@ /**

if (!maybeLess && bytes2read < length) {
throw new then_read_stream_1.EndOfStreamError();
throw new peek_readable_1.EndOfStreamError();
}

@@ -69,3 +69,3 @@ else {

if (this.buffer.length - position < token.len) {
throw new then_read_stream_1.EndOfStreamError();
throw new peek_readable_1.EndOfStreamError();
}

@@ -72,0 +72,0 @@ return token.get(this.buffer, position);

@@ -6,3 +6,3 @@ /// <reference types="node" />
import { IFileInfo } from './types';
- export { EndOfStreamError } from 'then-read-stream';
+ export { EndOfStreamError } from 'peek-readable';
export { ITokenizer, IFileInfo } from './types';

@@ -9,0 +9,0 @@ export { IToken, IGetToken } from '@tokenizer/token';

@@ -5,4 +5,4 @@ "use strict";
const BufferTokenizer_1 = require("./BufferTokenizer");
- var then_read_stream_1 = require("then-read-stream");
- exports.EndOfStreamError = then_read_stream_1.EndOfStreamError;
+ var peek_readable_1 = require("peek-readable");
+ exports.EndOfStreamError = peek_readable_1.EndOfStreamError;
/**

@@ -9,0 +9,0 @@ * Construct ReadStreamTokenizer from given Stream.
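
The hunks above capture the functional change in 5.0.1: the then-read-stream dependency is replaced by its renamed successor peek-readable, and EndOfStreamError is re-exported from there instead. Callers that import EndOfStreamError from strtok3 itself are unaffected. A minimal consumer sketch of that, assuming strtok3's fromBuffer()/readToken() helpers and the separate token-types package (neither of which appears in this diff):

// Hypothetical example: catch the end-of-stream condition via strtok3's own
// re-export, so the consumer never needs a direct dependency on peek-readable.
const strtok3 = require('strtok3');
const Token = require('token-types');

async function readMagic(buffer) {
  const tokenizer = await strtok3.fromBuffer(buffer);
  try {
    // Read a 32-bit big-endian unsigned integer from the start of the buffer.
    return await tokenizer.readToken(Token.UINT32_BE);
  } catch (err) {
    if (err instanceof strtok3.EndOfStreamError) {
      return undefined; // fewer than 4 bytes were available
    }
    throw err;
  }
}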

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const AbstractTokenizer_1 = require("./AbstractTokenizer");
const then_read_stream_1 = require("then-read-stream");
const peek_readable_1 = require("peek-readable");
const fs = require("./FsPromise");

@@ -31,6 +31,6 @@ class FileTokenizer extends AbstractTokenizer_1.AbstractTokenizer {

if (res.bytesRead < length)
throw new then_read_stream_1.EndOfStreamError();
throw new peek_readable_1.EndOfStreamError();
this.position += res.bytesRead;
if (res.bytesRead < length) {
throw new then_read_stream_1.EndOfStreamError();
throw new peek_readable_1.EndOfStreamError();
}

@@ -51,3 +51,3 @@ return res.bytesRead;

if (!maybeless && res.bytesRead < length) {
throw new then_read_stream_1.EndOfStreamError();
throw new peek_readable_1.EndOfStreamError();
}

@@ -54,0 +54,0 @@ return res.bytesRead;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const AbstractTokenizer_1 = require("./AbstractTokenizer");
const then_read_stream_1 = require("then-read-stream");
const peek_readable_1 = require("peek-readable");
const _debug = require("debug");

@@ -11,3 +11,3 @@ const debug = _debug('strtok3:ReadStreamTokenizer');

super(fileInfo);
this.streamReader = new then_read_stream_1.StreamReader(stream);
this.streamReader = new peek_readable_1.StreamReader(stream);
}

@@ -49,3 +49,3 @@ /**

if (!maybeless && bytesRead < length) {
throw new then_read_stream_1.EndOfStreamError();
throw new peek_readable_1.EndOfStreamError();
}

@@ -81,3 +81,3 @@ return bytesRead;

if (!maybeless && bytesRead < length) {
throw new then_read_stream_1.EndOfStreamError();
throw new peek_readable_1.EndOfStreamError();
}

@@ -84,0 +84,0 @@ return bytesRead;
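
The StreamReader swap above is internal to ReadStreamTokenizer; stream consumers keep using the same entry points. A hypothetical sketch, again assuming strtok3's fromStream() factory (the "Construct ReadStreamTokenizer from given Stream" helper referenced earlier) and the token-types package:

// Hypothetical example: tokenize the first byte of a file exposed as a stream.
const fs = require('fs');
const strtok3 = require('strtok3');
const Token = require('token-types');

async function firstByteOf(path) {
  // fromStream wraps the stream in the ReadStreamTokenizer patched above.
  const tokenizer = await strtok3.fromStream(fs.createReadStream(path));
  return tokenizer.readToken(Token.UINT8);
}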

package.json

{
"name": "strtok3",
- "version": "5.0.0",
+ "version": "5.0.1",
"description": "A promise based streaming tokenizer",

@@ -40,5 +40,5 @@ "author": {
"@types/mocha": "^5.2.7",
- "@types/node": "^13.1.1",
- "@typescript-eslint/eslint-plugin": "^2.13.0",
- "@typescript-eslint/parser": "^2.13.0",
+ "@types/node": "^13.1.4",
+ "@typescript-eslint/eslint-plugin": "^2.14.0",
+ "@typescript-eslint/parser": "^2.14.0",
"chai": "^4.2.0",

@@ -48,3 +48,3 @@ "coveralls": "^3.0.7",
"eslint": "^6.8.0",
- "mocha": "^6.2.2",
+ "mocha": "^7.0.0",
"nyc": "^15.0.0",

@@ -61,3 +61,3 @@ "remark-cli": "^7.0.1",
"debug": "^4.1.1",
- "then-read-stream": "^3.0.0"
+ "peek-readable": "^3.1.0"
},

@@ -64,0 +64,0 @@ "keywords": [

README.md

@@ -26,2 +26,3 @@ [![Build Status](https://travis-ci.org/Borewit/strtok3.svg?branch=master)](https://travis-ci.org/Borewit/strtok3)
* HTTP chunked transfer provided by [@tokenizer/http](https://github.com/Borewit/tokenizer-http).
* Chunked [Amazon S3](https://aws.amazon.com/s3) access provided by [@tokenizer/s3](https://github.com/Borewit/tokenizer-s3).

@@ -28,0 +29,0 @@ ## Installation

Sorry, the diff of this file is not supported yet
