@wgslx/wgslx - npm package version comparison

Comparing version 0.0.3 to 0.0.4

dist/loader.d.ts

dist/patterns.js

@@ -5,3 +5,3 @@ "use strict";

function createRegExpTextMatcher(...regexes) {
-const stickRegexes = regexes.map(r => {
+const stickRegexes = regexes.map((r) => {
let flags = 'y';

@@ -55,3 +55,6 @@ if (r.dotAll)

exports.INT_HEX_LITERAL_REGEX = [/0[xX][0-9a-fA-F]+[iu]?/];
-exports.INT_LITERAL_REGEX = [...exports.INT_DEC_LITERAL_REGEX, ...exports.INT_HEX_LITERAL_REGEX];
+exports.INT_LITERAL_REGEX = [
+...exports.INT_DEC_LITERAL_REGEX,
+...exports.INT_HEX_LITERAL_REGEX,
+];
exports.FLOAT_DEC_LITERAL_REGEX = [

@@ -69,4 +72,11 @@ /0[fh]/,

];
-exports.FLOAT_LITERAL_REGEX = [...exports.FLOAT_DEC_LITERAL_REGEX, ...exports.FLOAT_HEX_LITERAL_REGEX];
-exports.LITERAL_REGEX = [...exports.BOOL_LITERAL_REGEX, ...exports.INT_DEC_LITERAL_REGEX, ...exports.FLOAT_LITERAL_REGEX];
+exports.FLOAT_LITERAL_REGEX = [
+...exports.FLOAT_DEC_LITERAL_REGEX,
+...exports.FLOAT_HEX_LITERAL_REGEX,
+];
+exports.LITERAL_REGEX = [
+...exports.BOOL_LITERAL_REGEX,
+...exports.INT_DEC_LITERAL_REGEX,
+...exports.FLOAT_LITERAL_REGEX,
+];
exports.matchLiteral = createRegExpTextMatcher(...exports.LITERAL_REGEX);

@@ -82,3 +92,4 @@ exports.IDENT_PATTERN_TOKEN_REGEX = /([_\p{XID_Start}][\p{XID_Continue}]+)|([\p{XID_Start}])/u;

let position = start;
-if (text.substring(position, position + exports.BLOCK_COMMENT_OPEN.length) !== exports.BLOCK_COMMENT_OPEN) {
+if (text.substring(position, position + exports.BLOCK_COMMENT_OPEN.length) !==
+exports.BLOCK_COMMENT_OPEN) {
return undefined;

@@ -85,0 +96,0 @@ }

@@ -1,2 +0,4 @@

import { Token } from "./token";
import { Token } from './token';
export declare function postprocess(token: Token): any;
export declare function minify(token: Token): void;
export declare function generateSourceMap(token: Token, file?: string, sourceRoot?: string): string;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.minify = void 0;
+exports.generateSourceMap = exports.minify = exports.postprocess = void 0;
const syntax_1 = require("./syntax");
const traversal_1 = require("./traversal");
+const source_map_1 = require("source-map");
const util_1 = require("./util");
+function postprocess(token) {
+let text = token.toString();
+text = text.replaceAll(util_1.TEMPLATE_START, '<').replaceAll(util_1.TEMPLATE_END, '>');
+return text;
+}
+exports.postprocess = postprocess;
function minify(token) {

@@ -10,6 +18,32 @@ (0, traversal_1.assertType)(token, syntax_1.translationUnit);

predicate: (0, traversal_1.ofType)(syntax_1.functionDecl, syntax_1.compoundStatement, syntax_1.ident),
-preorderCallback: (token) => {
+preorderCallback: (token) => { },
});
}
+exports.minify = minify;
+function generateSourceMap(token, file, sourceRoot) {
+const generator = new source_map_1.SourceMapGenerator({ file, sourceRoot });
+(0, traversal_1.traverse)(token, {
+preorderCallback(token, ancestors, index) {
+if (!token.source || !token.destination)
+return;
+const source = token.source.split(':');
+const destination = token.destination.split(':');
+const file = source[2];
+const original = {
+line: parseInt(source[0]),
+column: parseInt(source[1]),
+};
+const generated = {
+line: parseInt(destination[0]),
+column: parseInt(destination[1]),
+};
+generator.addMapping({
+source: file,
+original,
+generated,
+});
+},
+});
+return generator.toString();
+}
-exports.minify = minify;
+exports.generateSourceMap = generateSourceMap;
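The hunk above introduces two new exports next to minify: postprocess, which stringifies a token tree and swaps the internal template markers back to '<' and '>', and generateSourceMap, which walks the tree and records a mapping for every token that carries both a source and a destination position. A minimal usage sketch, assuming a parse entry point that yields the root Token and assuming these functions are re-exported from the package root (neither assumption is shown in this diff):

// Hedged sketch only; `parse`, the import path, and the call order are assumptions.
import { minify, postprocess, generateSourceMap } from '@wgslx/wgslx';
import type { Token } from '@wgslx/wgslx';

declare function parse(source: string): Token; // assumed entry point, not part of this diff

function emit(source: string): { code: string; map: string } {
  const root: Token = parse(source);
  minify(root); // declared to return void, so it mutates the token tree in place
  const map = generateSourceMap(root, 'out.wgsl'); // serialized source map (generator.toString())
  const code = postprocess(root); // restores '<' / '>' from the private template markers
  return { code, map };
}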

@@ -6,4 +6,4 @@ interface TemplateList {

export declare function preprocess(text: string): string;
-export declare function discoverComments(text: string): void;
+export declare function stripComments(text: string): string;
export declare function discoverTemplates(text: string): TemplateList[];
export {};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.discoverTemplates = exports.discoverComments = exports.preprocess = void 0;
+exports.discoverTemplates = exports.stripComments = exports.preprocess = void 0;
const patterns_1 = require("./patterns");
-const TEMPLATE_START = '\u276c';
-const TEMPLATE_END = '\u276d';
+const util_1 = require("./util");
function preprocess(text) {
-const templateLists = discoverTemplates(text);
+const strippedText = stripComments(text);
+const templateLists = discoverTemplates(strippedText);
text = text.replaceAll(/[<>]/g, (character, offset) => {
switch (character) {
case '<':
-return templateLists.some(t => t.startPosition === offset)
-? TEMPLATE_START
+return templateLists.some((t) => t.startPosition === offset)
+? util_1.TEMPLATE_START
: '<';
case '>':
-return templateLists.some(t => t.endPosition === offset)
-? TEMPLATE_END
+return templateLists.some((t) => t.endPosition === offset)
+? util_1.TEMPLATE_END
: '>';

@@ -25,5 +25,75 @@ }

exports.preprocess = preprocess;
-function discoverComments(text) {
+function firstIndexOf(text, position, ...values) {
+let minIndex = Number.MAX_SAFE_INTEGER;
+let minChar = '';
+for (let value of values) {
+const index = text.indexOf(value, position);
+if (index !== -1 && index < minIndex) {
+minIndex = index;
+minChar = value;
+}
+}
+if (minIndex === Number.MAX_SAFE_INTEGER) {
+return [-1, ''];
+}
+return [minIndex, minChar];
+}
-exports.discoverComments = discoverComments;
+function stripComments(text) {
+const lines = text.split(patterns_1.LINE_BREAK_REGEX);
+for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) {
+const [firstCommentIndex, firstCommentType] = firstIndexOf(lines[lineIndex], 0, '//', '/*', '*/');
+if (firstCommentIndex === -1) {
+lines[lineIndex] = lines[lineIndex].trimEnd();
+continue;
+}
+if (firstCommentType === '*/') {
+throw new Error(`Line ${lineIndex + 1}: Unexpected block comment close.`);
+}
+if (firstCommentType === '//') {
+lines[lineIndex] = lines[lineIndex].substring(0, firstCommentIndex);
+continue;
+}
+let depth = 1;
+let startLine = lineIndex;
+let column = firstCommentIndex + 2;
+while (depth !== 0) {
+const [nextBlockIndex, nextBlockType] = firstIndexOf(lines[lineIndex], column, '/*', '*/');
+if (nextBlockIndex === -1) {
+if (lineIndex === lines.length - 1) {
+throw new Error(`Line ${startLine}: Unclosed block comment.`);
+}
+lineIndex += 1;
+column = 0;
+continue;
+}
+if (nextBlockType === '/*') {
+depth += 1;
+column = nextBlockIndex + 2;
+continue;
+}
+if (nextBlockType === '*/') {
+depth -= 1;
+column = nextBlockIndex + 2;
+continue;
+}
+throw new Error('Unreachable code.');
+}
+if (startLine == lineIndex) {
+lines[lineIndex] =
+lines[lineIndex].substring(0, firstCommentIndex) +
+' '.repeat(column - firstCommentIndex) +
+lines[lineIndex].substring(column);
+lineIndex -= 1;
+continue;
+}
+lines[startLine] = lines[startLine].substring(0, firstCommentIndex);
+for (let j = startLine + 1; j < lineIndex; j++) {
+lines[j] = '';
+}
+lines[lineIndex] = ' '.repeat(column) + lines[lineIndex].substring(column);
+lineIndex -= 1;
+}
+return lines.join('\n');
+}
+exports.stripComments = stripComments;
function discoverTemplates(text) {

@@ -48,3 +118,3 @@ const discoveredTemplateLists = [];

} while (matched === true && matchers.length > 1);
-return (startPosition === currentPosition)
+return startPosition === currentPosition
? undefined

@@ -75,3 +145,2 @@ : text.substring(startPosition, currentPosition);

}
-;
}

@@ -82,3 +151,4 @@ continue;

if (pendingCandidatesStack.length > 0 &&
-pendingCandidatesStack[pendingCandidatesStack.length - 1].depth === nestingDepth) {
+pendingCandidatesStack[pendingCandidatesStack.length - 1].depth ===
+nestingDepth) {
const pending = pendingCandidatesStack.pop();

@@ -121,4 +191,5 @@ discoveredTemplateLists.push({

if (startsWithAdvance('&&', '||')) {
-while (pendingCandidatesStack.length
-&& pendingCandidatesStack[pendingCandidatesStack.length - 1].depth >= nestingDepth) {
+while (pendingCandidatesStack.length &&
+pendingCandidatesStack[pendingCandidatesStack.length - 1].depth >=
+nestingDepth) {
pendingCandidatesStack.pop();

@@ -125,0 +196,0 @@ }
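The large hunk above replaces discoverComments with stripComments plus a firstIndexOf helper: line comments are cut at '//', block comments (which nest in WGSL) are tracked with a depth counter, single-line block comments are overwritten with spaces of the same width so later column offsets are unchanged, interior lines of multi-line block comments are blanked, and an unmatched '*/' or an unclosed '/*' raises an Error. A rough behavioural sketch, assuming the functions are importable from the package root (the module path is not shown in this diff):

// Illustration of the expected behaviour; the import path is an assumption.
import { stripComments, preprocess } from '@wgslx/wgslx';

const wgsl = [
  'fn main() { // entry point',
  '  let x = array<f32, 4>(/* lanes /* nested */ ok */ 0, 1, 2, 3);',
  '}',
].join('\n');

const stripped = stripComments(wgsl);
// Line 1: everything from '//' onward is dropped (the line is not padded).
// Line 2: the nested block comment is replaced by an equal number of spaces,
//         so the column of every later character is preserved.

const processed = preprocess(wgsl);
// preprocess now strips comments before template discovery, then rewrites the
// '<' and '>' of discovered template lists into U+276C / U+276D placeholders.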

@@ -1,4 +0,4 @@

import { TextMatcher } from "./patterns";
import { Cursor, Sequence } from "./sequence";
import { Token } from "./token";
import { TextMatcher } from './patterns';
import { Cursor, Sequence } from './sequence';
import { Token } from './token';
export interface RuleMatch {

@@ -5,0 +5,0 @@ token?: Token;

@@ -14,3 +14,3 @@ "use strict";

else {
-token = token_1.Token.group(token.map(r => r.token).filter(util_1.isValued));
+token = token_1.Token.group(token.map((r) => r.token).filter(util_1.isValued));
}

@@ -101,3 +101,3 @@ }

function rulifyAll(rules) {
-return rules.map(r => rulifyOne(r));
+return rules.map((r) => rulifyOne(r));
}

@@ -104,0 +104,0 @@ class LiteralRule extends Rule {

@@ -1,2 +0,2 @@

import { TextMatcher } from "./patterns";
import { TextMatcher } from './patterns';
export interface Segment {

@@ -3,0 +3,0 @@ text: string;

@@ -1,7 +0,13 @@

-export interface TokenObject {
+export interface TokenJson {
text?: string;
symbol?: string;
source?: string;
-children?: TokenObject[];
+destination?: string;
+children?: TokenJson[];
}
+export interface TokenRange {
+file: string;
+line: number;
+column: number;
+}
export declare class Token {

@@ -12,2 +18,3 @@ id: number;

source?: string;
+destination?: string;
children?: Token[];

@@ -17,3 +24,3 @@ maybe: boolean;

clone(): Token;
-toObject(): TokenObject;
+toObject(): TokenJson;
toString(compact?: boolean): any;

@@ -20,0 +27,0 @@ static text(text: string, source: string): Token;

@@ -9,2 +9,3 @@ "use strict";

source;
+destination;
children;

@@ -20,2 +21,3 @@ maybe = false;

token.source = this.source;
+token.destination = this.destination;
token.children = this.children;

@@ -29,2 +31,4 @@ token.maybe = this.maybe;

object.source = this.source;
+if (this.destination)
+object.destination = this.destination;
if (this.text)

@@ -35,3 +39,3 @@ object.text = this.text;

if (this.children)
-object.children = this.children.map(t => t.toObject());
+object.children = this.children.map((t) => t.toObject());
return object;

@@ -42,4 +46,4 @@ }

const text = this.children
-.map(m => m?.toString())
-.filter(t => t)
+.map((m) => m?.toString())
+.filter((t) => t)
.join(' ');

@@ -46,0 +50,0 @@ if (compact) {
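For context on the token changes above: TokenObject is renamed to TokenJson, a TokenRange interface is added, and tokens gain a destination field alongside source; generateSourceMap splits these strings on ':' and reads a line, a column, and (for source) a trailing file component. An illustrative value under that reading, with made-up numbers (the exact position-string format is inferred, not documented in this diff):

// Hypothetical serialized token; the shape follows the TokenJson interface above,
// the 'line:column[:file]' encoding is an inference from generateSourceMap, and
// the import path is an assumption.
import type { TokenJson } from '@wgslx/wgslx';

const example: TokenJson = {
  symbol: 'ident',
  text: 'frag_main',
  source: '12:4:shader.wgsl', // original line:column:file
  destination: '1:37',        // line:column in the emitted output
};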

@@ -1,3 +0,3 @@

import { SymbolRule } from "./rules";
import { Token } from "./token";
import { SymbolRule } from './rules';
import { Token } from './token';
type FlexSymbol = string | SymbolRule;

@@ -4,0 +4,0 @@ export declare function assertType(token: Token, symbol: FlexSymbol): void;

@@ -11,3 +11,3 @@ "use strict";

function symbolNames(symbols) {
-return symbols.map(s => symbolName(s));
+return symbols.map((s) => symbolName(s));
}

@@ -25,3 +25,3 @@ function assertType(token, symbol) {

const symbols = token.symbol;
-return names.some(n => symbols.includes(n));
+return names.some((n) => symbols.includes(n));
}

@@ -28,0 +28,0 @@ return false;

@@ -0,1 +1,3 @@

+export declare const TEMPLATE_START = "\u276C";
+export declare const TEMPLATE_END = "\u276D";
export declare function isValued<TValue>(value: TValue | null | undefined): value is TValue;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.isValued = void 0;
+exports.isValued = exports.TEMPLATE_END = exports.TEMPLATE_START = void 0;
+exports.TEMPLATE_START = '\u276c';
+exports.TEMPLATE_END = '\u276d';
function isValued(value) {

@@ -5,0 +7,0 @@ return value !== null && value !== undefined;

{
"name": "@wgslx/wgslx",
"version": "0.0.3",
"version": "0.0.4",
"description": "Extended WebGPU shading language tools",

@@ -13,2 +13,3 @@ "keywords": [

"build": "npx tsc --build",
"publish": "npm run test && npm run build",
"generate": "npx ts-node scripts/generate.ts",

@@ -15,0 +16,0 @@ "test": "npx jest"
