@csstools/css-tokenizer - npm package version comparison

Comparing version 2.2.1 to 2.2.2


CHANGELOG.md
# Changes to CSS Tokenizer
### 2.2.2
_December 15, 2023_
- Fix type definitions
### 2.2.1

@@ -17,39 +23,2 @@

### 2.1.1
_April 10, 2023_
- Document `tokenize` helper function
### 2.1.0
_February 21, 2023_
- Add `tokenize` helper function
### 2.0.2
_February 13, 2023_
- Relax `isToken` to match artificial tokens that correctly follow the interface.
### 2.0.1
_January 28, 2023_
- Improve `types` declaration in `package.json`
### 2.0.0
_January 19, 2023_
- Simplify `Reader` interface (breaking)
- Change the `ParseError` interface; it is now a subclass of `Error` (breaking)
- Remove the `commentsAreTokens` option as `true` was the only desirable value (breaking)
- Improve performance
### 1.0.0
_November 14, 2022_
- Initial version
[Full CHANGELOG](https://github.com/csstools/postcss-plugins/tree/main/packages/css-tokenizer/CHANGELOG.md)
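
The `tokenize` helper added in 2.1.0 and the `ParseError` class reworked in 2.0.0 both appear in the type definitions below. A minimal usage sketch based on those declarations; the sample CSS string and logging are illustrative, not part of this release:

```ts
import { tokenize, stringify, ParseError } from '@csstools/css-tokenizer';

// Tokenize a CSS string; parse errors are reported through the callback
// (since 2.0.0, ParseError is a subclass of Error).
const tokens = tokenize(
	{ css: '.example { color: rgb(0 128 255 / 50%); }' },
	{
		onParseError: (error: ParseError) => {
			console.warn(error.message, error.sourceStart, error.sourceEnd);
		},
	},
);

// `stringify` turns a token list back into CSS text.
console.log(stringify(...tokens));
```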


dist/index.d.ts

@@ -1,9 +0,238 @@

export type { CSSToken } from './interfaces/token';
export { ParseError } from './interfaces/error';
export { Reader } from './reader';
export { TokenType, NumberType, mirrorVariantType, mirrorVariant, isToken } from './interfaces/token';
export { stringify } from './stringify';
export { tokenize, tokenizer } from './tokenizer';
export { cloneTokens } from './util/clone-tokens';
export type { TokenAtKeyword, TokenBadString, TokenBadURL, TokenCDC, TokenCDO, TokenColon, TokenComma, TokenComment, TokenDelim, TokenDimension, TokenEOF, TokenFunction, TokenHash, TokenIdent, TokenNumber, TokenPercentage, TokenSemicolon, TokenString, TokenURL, TokenWhitespace, TokenOpenParen, TokenCloseParen, TokenOpenSquare, TokenCloseSquare, TokenOpenCurly, TokenCloseCurly, } from './interfaces/token';
export { mutateIdent, } from './util/mutations';
export declare function cloneTokens(tokens: Array<CSSToken>): Array<CSSToken>;
export declare type CodePointReader = {
representationStart: number;
representationEnd: number;
cursor: number;
codePointSource: Array<number>;
representationIndices: Array<number>;
source: string;
advanceCodePoint(n?: number): void;
readCodePoint(n?: number): number | false;
unreadCodePoint(n?: number): void;
resetRepresentation(): void;
};
export declare type CSSToken = TokenAtKeyword | TokenBadString | TokenBadURL | TokenCDC | TokenCDO | TokenColon | TokenComma | TokenComment | TokenDelim | TokenDimension | TokenEOF | TokenFunction | TokenHash | TokenIdent | TokenNumber | TokenPercentage | TokenSemicolon | TokenString | TokenURL | TokenWhitespace | TokenOpenParen | TokenCloseParen | TokenOpenSquare | TokenCloseSquare | TokenOpenCurly | TokenCloseCurly | TokenUnicodeRange;
export declare enum HashType {
Unrestricted = "unrestricted",
ID = "id"
}
export declare function isToken(x: any): x is CSSToken;
export declare function mirrorVariant(token: CSSToken): CSSToken | null;
export declare function mirrorVariantType(type: TokenType): TokenType | null;
export declare function mutateIdent(ident: TokenIdent, newValue: string): void;
export declare enum NumberType {
Integer = "integer",
Number = "number"
}
export declare class ParseError extends Error {
/** The index of the start character of the current token. */
sourceStart: number;
/** The index of the end character of the current token. */
sourceEnd: number;
/** The parser steps that preceded the error. */
parserState: Array<string>;
constructor(message: string, sourceStart: number, sourceEnd: number, parserState: Array<string>);
}
export declare class Reader implements CodePointReader {
cursor: number;
source: string;
codePointSource: Array<number>;
representationIndices: Array<number>;
length: number;
representationStart: number;
representationEnd: number;
constructor(source: string);
advanceCodePoint(n?: number): void;
readCodePoint(n?: number): number | false;
unreadCodePoint(n?: number): void;
resetRepresentation(): void;
}
export declare function stringify(...tokens: Array<CSSToken>): string;
export declare type Token<T extends TokenType, U> = [
/** The type of token */
T,
/** The token representation */
string,
/** Start position of representation */
number,
/** End position of representation */
number,
/** Extra data */
U
];
export declare type TokenAtKeyword = Token<TokenType.AtKeyword, {
value: string;
}>;
export declare type TokenBadString = Token<TokenType.BadString, undefined>;
export declare type TokenBadURL = Token<TokenType.BadURL, undefined>;
export declare type TokenCDC = Token<TokenType.CDC, undefined>;
export declare type TokenCDO = Token<TokenType.CDO, undefined>;
export declare type TokenCloseCurly = Token<TokenType.CloseCurly, undefined>;
export declare type TokenCloseParen = Token<TokenType.CloseParen, undefined>;
export declare type TokenCloseSquare = Token<TokenType.CloseSquare, undefined>;
export declare type TokenColon = Token<TokenType.Colon, undefined>;
export declare type TokenComma = Token<TokenType.Comma, undefined>;
export declare type TokenComment = Token<TokenType.Comment, undefined>;
export declare type TokenDelim = Token<TokenType.Delim, {
value: string;
}>;
export declare type TokenDimension = Token<TokenType.Dimension, {
value: number;
signCharacter?: '+' | '-';
unit: string;
type: NumberType;
}>;
export declare type TokenEOF = Token<TokenType.EOF, undefined>;
export declare type TokenFunction = Token<TokenType.Function, {
value: string;
}>;
export declare type TokenHash = Token<TokenType.Hash, {
value: string;
type: HashType;
}>;
export declare type TokenIdent = Token<TokenType.Ident, {
value: string;
}>;
export declare function tokenize(input: {
css: {
valueOf(): string;
};
unicodeRangesAllowed?: boolean;
}, options?: {
onParseError?: (error: ParseError) => void;
}): Array<CSSToken>;
export declare function tokenizer(input: {
css: {
valueOf(): string;
};
unicodeRangesAllowed?: boolean;
}, options?: {
onParseError?: (error: ParseError) => void;
}): {
nextToken: () => CSSToken | undefined;
endOfFile: () => boolean;
};
export declare type TokenNumber = Token<TokenType.Number, {
value: number;
signCharacter?: '+' | '-';
type: NumberType;
}>;
export declare type TokenOpenCurly = Token<TokenType.OpenCurly, undefined>;
export declare type TokenOpenParen = Token<TokenType.OpenParen, undefined>;
export declare type TokenOpenSquare = Token<TokenType.OpenSquare, undefined>;
export declare type TokenPercentage = Token<TokenType.Percentage, {
value: number;
signCharacter?: '+' | '-';
}>;
export declare type TokenSemicolon = Token<TokenType.Semicolon, undefined>;
export declare type TokenString = Token<TokenType.String, {
value: string;
}>;
export declare enum TokenType {
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#comment-diagram */
Comment = "comment",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-at-keyword-token */
AtKeyword = "at-keyword-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-bad-string-token */
BadString = "bad-string-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-bad-url-token */
BadURL = "bad-url-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-cdc-token */
CDC = "CDC-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-cdo-token */
CDO = "CDO-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-colon-token */
Colon = "colon-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-comma-token */
Comma = "comma-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-delim-token */
Delim = "delim-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-dimension-token */
Dimension = "dimension-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-eof-token */
EOF = "EOF-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-function-token */
Function = "function-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-hash-token */
Hash = "hash-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-ident-token */
Ident = "ident-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-number-token */
Number = "number-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-percentage-token */
Percentage = "percentage-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-semicolon-token */
Semicolon = "semicolon-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-string-token */
String = "string-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-url-token */
URL = "url-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-whitespace-token */
Whitespace = "whitespace-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#tokendef-open-paren */
OpenParen = "(-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#tokendef-close-paren */
CloseParen = ")-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#tokendef-open-square */
OpenSquare = "[-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#tokendef-close-square */
CloseSquare = "]-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#tokendef-open-curly */
OpenCurly = "{-token",
/** https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#tokendef-close-curly */
CloseCurly = "}-token",
/** https://drafts.csswg.org/css-syntax/#typedef-unicode-range-token */
UnicodeRange = "unicode-range-token"
}
export declare type TokenUnicodeRange = Token<TokenType.UnicodeRange, {
startOfRange: number;
endOfRange: number;
}>;
export declare type TokenURL = Token<TokenType.URL, {
value: string;
}>;
export declare type TokenWhitespace = Token<TokenType.Whitespace, undefined>;
export { }
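
A short sketch of how the streaming `tokenizer`, the `TokenType` checks, and `mutateIdent` declared above fit together; the input CSS and the renamed ident value are illustrative:

```ts
import { tokenizer, mutateIdent, TokenType, type CSSToken, type TokenIdent } from '@csstools/css-tokenizer';

const t = tokenizer({ css: '.example { color: currentcolor; }' });
const tokens: Array<CSSToken> = [];

while (!t.endOfFile()) {
	const token = t.nextToken();
	if (!token) {
		break;
	}

	// Tokens are tuples: [type, representation, start, end, extra data].
	if (token[0] === TokenType.Ident) {
		const ident = token as TokenIdent;
		if (ident[4].value === 'currentcolor') {
			// mutateIdent rewrites the ident's value in place.
			mutateIdent(ident, 'currentColor');
		}
	}

	tokens.push(token);
}
```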
{
"name": "@csstools/css-tokenizer",
"description": "Tokenize CSS",
"version": "2.2.1",
"version": "2.2.2",
"contributors": [

@@ -30,11 +30,14 @@ {

},
"type": "module",
"main": "dist/index.cjs",
"module": "dist/index.mjs",
"types": "dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.mjs",
"require": "./dist/index.cjs",
"default": "./dist/index.mjs"
"import": {
"types": "./dist/index.d.ts",
"default": "./dist/index.mjs"
},
"require": {
"default": "./dist/index.cjs"
}
}

@@ -48,14 +51,2 @@ },

],
"devDependencies": {
"@rmenke/css-tokenizer-tests": "^1.1.1",
"postcss-parser-tests": "^8.8.0"
},
"scripts": {
"benchmark": "node ./test/benchmark.mjs",
"build": "rollup -c ../../rollup/default.mjs",
"lint": "node ../../.github/bin/format-package-json.mjs",
"prepublishOnly": "npm run build && npm run test",
"stryker": "stryker run --logLevel error",
"test": "node ./test/test.mjs && node ./test/_import.mjs && node ./test/_require.cjs"
},
"homepage": "https://github.com/csstools/postcss-plugins/tree/main/packages/css-tokenizer#readme",

@@ -71,6 +62,3 @@ "repository": {

"tokenizer"
- ],
- "volta": {
-   "extends": "../../package.json"
- }
+ ]
}
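
Together with the rebuilt dist/index.d.ts above, the reworked `exports` map is part of the 2.2.2 "Fix type definitions" change: the `types` condition now sits inside the `import` condition, so declaration resolution stays tied to the ESM entry under TypeScript's `node16`/`nodenext` module resolution. A consumer sketch under assumed project settings (not shown in this diff):

```ts
// In a project with "type": "module" and "moduleResolution": "nodenext",
// this import resolves to ./dist/index.mjs and its types to ./dist/index.d.ts
// through the "import" condition added above.
import { tokenize, TokenType } from '@csstools/css-tokenizer';

const tokens = tokenize({ css: ':root { --size: 1px; }' });

// The declarations type `tokens` as Array<CSSToken>, so tuple access is fully typed.
const dimensions = tokens.filter((token) => token[0] === TokenType.Dimension);
console.log(dimensions.length);
```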

