
@tokey/core - npm Package Compare versions

Comparing version 1.3.0 to 1.4.0

dist/core.d.ts

@@ -11,5 +11,6 @@ import type { Token } from './types';
 createToken(value: string, type: T['type'], start: number, end: number): T;
+shouldClose?(ch: string, previousChar: string): boolean;
 offset?: number;
 }
-export declare function tokenize<T extends Token<unknown>>(source: string, { isDelimiter, isStringDelimiter, isWhitespace, shouldAddToken, createToken, getCommentStartType, isCommentEnd, getUnclosedComment, offset, }: TokyOptions<T>): T[];
+export declare function tokenize<T extends Token<unknown>>(source: string, { shouldClose, isDelimiter, isStringDelimiter, isWhitespace, shouldAddToken, createToken, getCommentStartType, isCommentEnd, getUnclosedComment, offset, }: TokyOptions<T>): T[];
 //# sourceMappingURL=core.d.ts.map
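
The API-level change in dist/core.d.ts is a new optional shouldClose(ch, previousChar) hook on the tokenize options, which also appears in the destructured parameter list. A minimal sketch of a hook matching that declared signature, assuming a caller wants closing brackets split into their own tokens (the bracket set and the backslash-escape rule are illustrative assumptions, not part of @tokey/core):

// Hedged sketch: matches the new optional signature
// shouldClose?(ch: string, previousChar: string): boolean
// The closers set and escape check are assumptions for illustration only.
const closers = new Set([')', ']', '}']);

function shouldClose(ch: string, previousChar: string): boolean {
  // Ask the tokenizer to flush the current buffer when a closing
  // bracket appears, unless it is escaped by a preceding backslash.
  return closers.has(ch) && previousChar !== '\\';
}

Such a hook would be supplied alongside the existing TokyOptions callbacks (isDelimiter, createToken, and the rest) when calling tokenize.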
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.tokenize = void 0;
function tokenize(source, { isDelimiter, isStringDelimiter, isWhitespace, shouldAddToken, createToken, getCommentStartType, isCommentEnd, getUnclosedComment, offset = 0, }) {
function tokenize(source, { shouldClose, isDelimiter, isStringDelimiter, isWhitespace, shouldAddToken, createToken, getCommentStartType, isCommentEnd, getUnclosedComment, offset = 0, }) {
const tokens = [];

@@ -50,2 +50,7 @@ let previousChar = '';
 }
+else if (shouldClose?.(ch, previousChar)) {
+pushBuffer();
+buffer += ch;
+pushBuffer(ch);
+}
 else {

@@ -72,3 +77,3 @@ buffer += ch;
 const end = start + buffer.length;
-type = type !== null && type !== void 0 ? type : (buffer.trim().length ? 'text' : 'space');
+type = type ?? (buffer.trim().length ? 'text' : 'space');
 if (shouldAddToken(type, buffer)) {

@@ -75,0 +80,0 @@ tokens[tokens.length] = createToken(buffer, type, start, end);
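
The compiled output shows how the new hook is consumed: when shouldClose fires, the pending buffer is flushed, the closing character is buffered, and it is flushed again so the closer comes out as its own token. The other visible change is that the older type !== null && type !== void 0 fallback is now emitted as type ?? ..., meaning the build targets an environment with native nullish coalescing. Below is a simplified, self-contained re-creation of the shouldClose branch, based only on the compiled code above; splitOnClose and its local pushBuffer are illustrative stand-ins, not @tokey/core internals (the package's pushBuffer also receives a token type, as seen in the diff).

// Toy re-creation of the new branch; not the package's implementation.
function splitOnClose(
  source: string,
  shouldClose: (ch: string, previousChar: string) => boolean
): string[] {
  const out: string[] = [];
  let buffer = '';
  let previousChar = '';
  const pushBuffer = () => {
    // Emit whatever is buffered as one chunk, then reset.
    if (buffer.length) out.push(buffer);
    buffer = '';
  };
  for (const ch of source) {
    if (shouldClose(ch, previousChar)) {
      pushBuffer(); // flush the text gathered before the closer
      buffer += ch; // buffer the closing character by itself
      pushBuffer(); // and flush it as its own chunk
    } else {
      buffer += ch;
    }
    previousChar = ch;
  }
  pushBuffer();
  return out;
}

// Example: splitOnClose('a(b)c', (ch) => ch === ')') yields ['a(b', ')', 'c']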

@@ -1,2 +0,2 @@
-export declare type Descriptors = 'string' | 'text' | 'line-comment' | 'multi-comment' | 'unclosed-string' | 'unclosed-comment' | 'space';
+export type Descriptors = 'string' | 'text' | 'line-comment' | 'multi-comment' | 'unclosed-string' | 'unclosed-comment' | 'space';
 export interface Token<Type = Descriptors> {

@@ -3,0 +3,0 @@ type: Type;

 {
 "name": "@tokey/core",
 "description": "simple code like tokenizer",
-"version": "1.3.0",
+"version": "1.4.0",
 "main": "dist/index.js",

@@ -6,0 +6,0 @@ "types": "dist/index.d.ts",

@@ -18,2 +18,3 @@ import type { Token } from './types';
 createToken(value: string, type: T['type'], start: number, end: number): T;
+shouldClose?(ch: string, previousChar: string): boolean;
 offset?: number;

@@ -25,2 +26,3 @@ }
 {
+shouldClose,
 isDelimiter,

@@ -75,2 +77,6 @@ isStringDelimiter,
 buffer += ch;
+} else if(shouldClose?.(ch, previousChar)) {
+pushBuffer();
+buffer += ch;
+pushBuffer(ch);
 } else {

@@ -77,0 +83,0 @@ buffer += ch;
