
cspell-grammar - npm Package Compare versions

Comparing version 0.1.0 to 0.1.1
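
At a glance, 0.1.1 makes grammars composable. Grammar now always materializes its repository (the field becomes required) and seeds it with $self/$base self-references; new getReferencePattern/resolveImports methods plus a Registry class let multiple grammars resolve each other's `include` references, caching each result on a new `_reference` field that the tokenizer now prefers over a repository lookup. pattern.ts gains patternToString to support optional match logging in tokenize, and on the visualize side the shared ScopeColorizer/LineColorizer types move into a types module while tokenizeToAnsi is exposed as a namespace export.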

dist/grammar/grammarFiles.d.ts


dist/grammar/grammar.d.ts

@@ -1,3 +0,4 @@

-import { GrammarDefinition } from './grammarDefinition';
+import { GrammarDefinition, Pattern } from './grammarDefinition';
import { Token } from './tokenize';
export { Token, tokenizeLine } from './tokenize';
export interface Tokenizer {

@@ -14,2 +15,3 @@ tokenize(line: string): Token[];

}
+export declare type ScopeResolver = (scopeNameRef: string) => Pattern | undefined;
export declare class Grammar {

@@ -20,3 +22,12 @@ private grammarDef;

tokenizeLines(input: Iterable<string>): IterableIterator<TokenizeLineResult>;
readonly grammar: GrammarDefinition;
/**
*
* @param scopeNameRef the name of the scope to look up in the repository. It is of the form
* `scope.ext#reference`
*/
getReferencePattern(scopeNameRef: string): Pattern | undefined;
private getPattern(ref);
resolveImports(resolver: ScopeResolver): void;
static createFromFile(filename: string): Promise<Grammar>;
}

dist/grammar/grammar.js

@@ -5,5 +5,13 @@ "use strict";

const fs = require("fs-extra");
const pattern_1 = require("./pattern");
var tokenize_2 = require("./tokenize");
exports.tokenizeLine = tokenize_2.tokenizeLine;
class Grammar {
constructor(grammarDef) {
this.grammarDef = grammarDef;
// fixup the repository to include $self and $base and ensure it exists.
const base = { $self: grammarDef, $base: grammarDef };
const repository = grammarDef.repository || base;
Object.assign(repository, base);
grammarDef.repository = repository;
}
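
The effect of this constructor fixup: every grammar gets a repository whose `$self` and `$base` entries point back at the grammar definition itself, so `include: "$self"` (and `$base`) resolve without special-casing, even for grammar files that declared no repository. A minimal sketch of what this implies; the grammar content is hypothetical:

import { Grammar } from 'cspell-grammar';

// Hypothetical minimal grammar definition; only scopeName/patterns matter here.
const def: any = { scopeName: 'source.example', patterns: [] };
const grammar = new Grammar(def);
// The repository now exists and is self-referential:
console.log(def.repository.$self === def); // true
console.log(def.repository.$base === def); // true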

@@ -29,2 +37,53 @@ tokenizer() {

}
get grammar() {
return this.grammarDef;
}
/**
*
* @param scopeNameRef the name of the scope to look up in the repository. It is of the form
* `scope.ext#reference`
*/
getReferencePattern(scopeNameRef) {
const [scope, reference = '$self'] = scopeNameRef.split('#', 2);
if (scope !== this.grammarDef.scopeName) {
return undefined;
}
return this.getPattern(reference);
}
getPattern(ref) {
const pattern = this.grammarDef.repository[ref];
return pattern || undefined;
}
resolveImports(resolver) {
const self = this;
function internalResolve(scopeNameRef) {
const [scope, reference = ''] = scopeNameRef.split('#', 2);
return (!scope || scope === '$self' || scope === '$base')
? self.getPattern(reference || scope)
: resolver(scopeNameRef);
}
function resolvePatternInclude(pat) {
if (!pat._reference) {
const refMaybe = internalResolve(pat.include);
if (!refMaybe) {
console.log(`Cannot resolve reference: (${self.grammarDef.scopeName}:${pat.include})`);
}
const ref = refMaybe || { name: pat.include };
pat._reference = ref;
}
}
function resolvePatterns(pat) {
if (pattern_1.isPatternInclude(pat)) {
resolvePatternInclude(pat);
}
else if (pattern_1.isPatternPatterns(pat)) {
(pat.patterns || []).forEach(p => resolvePatterns(p));
}
}
const repository = this.grammarDef.repository;
resolvePatterns(this.grammarDef);
Object.keys(repository)
.map(key => repository[key])
.forEach(resolvePatterns);
}
static async createFromFile(filename) {

@@ -31,0 +90,0 @@ const json = await fs.readFile(filename, 'utf8');
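
Together, getReferencePattern and resolveImports define the cross-grammar wiring used by the new Registry: a reference has the form `scope.ext#reference`, where the part before `#` must match the grammar's own scopeName and the part after names a repository entry (defaulting to `$self`). Note the failure mode: an unresolvable include is logged and replaced with a bare `{ name: pat.include }` placeholder, so tokenizing degrades to a no-match instead of throwing. A hedged sketch; the file and scope names are hypothetical:

import { Grammar } from 'cspell-grammar';

async function wire() {
    // Hypothetical grammar files.
    const md = await Grammar.createFromFile('syntax/markdown.tmLanguage.json');
    const ts = await Grammar.createFromFile('syntax/typescript.tmLanguage.json');
    // 'source.ts' alone resolves to the whole grammar ($self);
    // 'source.ts#comment' would name the `comment` repository entry.
    md.resolveImports(scopeNameRef => ts.getReferencePattern(scopeNameRef));
}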


dist/grammar/grammarDefinition.d.ts

@@ -9,3 +9,3 @@ /**

patterns: Pattern[];
-repository?: Repository;
+repository: Repository;
}

@@ -39,2 +39,3 @@ export declare type Pattern = PatternMatch | PatternBeginEnd | PatternInclude | PatternPatterns | PatternName;

comment?: string;
+_reference?: Pattern;
}

@@ -41,0 +42,0 @@ export interface PatternPatterns extends PatternBase {

dist/grammar/index.d.ts

export * from './grammarDefinition';
export * from './grammar';
export * from './tokenizeFile';
export * from './registry';

dist/grammar/index.js

@@ -7,2 +7,4 @@ "use strict";

__export(require("./grammar"));
__export(require("./tokenizeFile"));
__export(require("./registry"));
//# sourceMappingURL=index.js.map

dist/grammar/pattern.d.ts

@@ -10,1 +10,2 @@ import { Capture, Pattern, PatternBeginEnd, PatternInclude, PatternMatch, PatternName, PatternPatterns } from './grammarDefinition';

export declare function endCaptures(pattern: Pattern): Capture | undefined;
export declare function patternToString(pattern: PatternMatch | PatternBeginEnd | PatternInclude | PatternPatterns | PatternName): string;

dist/grammar/pattern.js

@@ -59,2 +59,19 @@ "use strict";

exports.endCaptures = endCaptures;
function patternToString(pattern) {
if (isPatternMatch(pattern)) {
return `PatternMatch: ${pattern.name || '?'} (${pattern.match.toString()})`;
}
if (isPatternBeginEnd(pattern)) {
return `PatternBeginEnd: ${pattern.name || '?'} (${pattern.begin.toString()})`;
}
if (isPatternPatterns(pattern)) {
return `PatternPatterns: ${pattern.name || '?'} [${pattern.patterns.length}]`;
}
if (isPatternInclude(pattern)) {
return `PatternInclude: ${pattern.name || '?'} (${pattern.include})`;
}
// pattern name
return `PatternName: ${pattern.name || '?'}`;
}
exports.patternToString = patternToString;
//# sourceMappingURL=pattern.js.map
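
For orientation, the one-line signatures these branches produce look like the following; the example patterns are hypothetical and assume the type guards key off each pattern's distinguishing property:

// patternToString({ name: 'comment.line', match: /\/\/.*/ })  -> "PatternMatch: comment.line (/\/\/.*/)"
// patternToString({ include: '#string' })                     -> "PatternInclude: ? (#string)"
// patternToString({ name: 'markup.bold' })                    -> "PatternName: markup.bold"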
"use strict";
// import { GrammarDefinition } from './grammarDefinition';
// export class Registry {
// private registry: Map<string, GrammarDefinition> = new Map();
// constructor(private resolveScope: (scopeName: string) => Promise<GrammarDefinition>) {}
// }
Object.defineProperty(exports, "__esModule", { value: true });
const grammar_1 = require("./grammar");
const grammarFiles_1 = require("./grammarFiles");
class Registry {
constructor(grammars) {
this.grammars = grammars;
this.scopeMap = new Map();
this.fileTypeMap = new Map();
this.grammars.forEach(g => this.scopeMap.set(g.grammar.scopeName, g));
this.grammars.forEach(g => this.fileTypeMap.set(g.grammar.scopeName.replace(/^.*\./, ''), g));
this.grammars.forEach(g => (g.grammar.fileTypes || [])
.map(t => this.normalizeFileType(t))
.forEach(t => this.fileTypeMap.set(t, g)));
this.grammars.forEach(g => g.resolveImports(scopeNameRef => {
const [scope] = scopeNameRef.split('#', 2);
const refGrammar = this.scopeMap.get(scope);
return refGrammar && refGrammar.getReferencePattern(scopeNameRef);
}));
}
normalizeFileType(fileType) {
return fileType.replace(/^\./, '');
}
getGrammarForFileType(fileType) {
return this.fileTypeMap.get(this.normalizeFileType(fileType));
}
static async create(grammarFileNames) {
const pGrammars = grammarFileNames
.map(grammarFiles_1.loadGrammar)
.map(async (def) => new grammar_1.Grammar(await def));
const grammars = await Promise.all(pGrammars);
return new Registry(grammars);
}
}
exports.Registry = Registry;
//# sourceMappingURL=registry.js.map
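
A usage sketch for the new Registry, with hypothetical grammar file paths: create() loads every file, wraps each in a Grammar, and the constructor then indexes the grammars by scopeName and file type and cross-resolves their imports.

import { Registry } from 'cspell-grammar';

async function demo() {
    const registry = await Registry.create([
        'syntax/typescript.tmLanguage.json', // hypothetical paths
        'syntax/markdown.tmLanguage.json',
    ]);
    // normalizeFileType strips a leading dot, so '.ts' and 'ts' are equivalent;
    // the scopeName suffix (e.g. 'ts' of 'source.ts') is registered as a fallback.
    return registry.getGrammarForFileType('.ts');
}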

dist/grammar/tokenize.js

@@ -8,2 +8,6 @@ "use strict";

const maxDepth = 100;
const useLogging = false;
function logInfo(message) {
useLogging && console.log(message);
}
function tokenizeLine(text, rule) {

@@ -83,2 +87,17 @@ const tokens = [];

function matchRule(text, offset, rule) {
let result;
try {
logInfo(`${'.'.repeat(rule.depth)}+${rule.depth} ${pattern_1.patternToString(rule.pattern)} test`);
result = matchRuleInner(text, offset, rule);
return result;
}
finally {
const msg = result
? (result.match ? `match ${result.match.length}` : 'non-match')
: 'failed';
logInfo(`${'.'.repeat(rule.depth)}+${rule.depth} ${pattern_1.patternToString(rule.pattern)} result ${msg}`);
}
}
exports.matchRule = matchRule;
function matchRuleInner(text, offset, rule) {
const { pattern, depth, grammarDef } = rule;

@@ -88,3 +107,3 @@ if (pattern_1.isPatternInclude(pattern)) {

const name = pattern.include.slice(1);
-const result = grammarDef.repository[name];
+const result = pattern._reference || grammarDef.repository[name];
if (result) {

@@ -104,2 +123,3 @@ return matchRule(text, offset, {

// We do not support including other grammars yet.
console.log(`Unknown include: (${name})`);
}

@@ -161,3 +181,2 @@ return { rule }; // Unsupported include, match nothing.

}
-exports.matchRule = matchRule;
function findBoundingRule(rule) {

@@ -164,0 +183,0 @@ while (rule.parent && !pattern_1.isPatternBeginEnd(rule.pattern)) {
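
The matchRule wrapper above uses try/finally so a result is logged even when matchRuleInner throws: `result` is still undefined in the finally block, which reports 'failed'. The same idiom in isolation, as a standalone sketch rather than package code:

function traced<T>(label: string, fn: () => T): T {
    let result: T | undefined;
    try {
        return (result = fn());
    } finally {
        // Runs on success and on throw; on throw, result is still undefined.
        console.log(`${label}: ${result === undefined ? 'failed' : 'ok'}`);
    }
}

traced('demo', () => 21 * 2); // logs "demo: ok", returns 42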

dist/index.d.ts

export * from './grammar';
export * from './visualize';

dist/index.js

@@ -7,2 +7,3 @@ "use strict";

__export(require("./grammar"));
__export(require("./visualize"));
//# sourceMappingURL=index.js.map

dist/visualize/index.d.ts

@@ -0,1 +1,5 @@

export * from './types';
export * from './tokenColorizer';
export * from './tokenizeToMd';
import * as TokenizeToAnsi from './tokenizeToAnsi';
export declare const tokenizeToAnsi: typeof TokenizeToAnsi;

dist/visualize/index.js

@@ -7,2 +7,5 @@ "use strict";

__export(require("./tokenColorizer"));
__export(require("./tokenizeToMd"));
const TokenizeToAnsi = require("./tokenizeToAnsi");
exports.tokenizeToAnsi = TokenizeToAnsi;
//# sourceMappingURL=index.js.map
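
The TokenizeToAnsi import/re-export pair packages a whole module as a single named export, so callers reach its functions through one namespace object. A sketch of the consumer side; the member functions are not shown in this diff, hence the placeholder:

import { tokenizeToAnsi } from 'cspell-grammar';
// tokenizeToAnsi.<exported fn>(...): members come from tokenizeToAnsi.ts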

dist/visualize/tokenColorizer.d.ts

import { Chalk } from 'chalk';
import { Scope } from '../grammar';
import { Token } from '../grammar/tokenize';
+import { ScopeColorizer, LineColorizer } from './types';
export declare type ColorTextFn = (text: string) => string;
export declare type ColorMap = [RegExp, ColorTextFn][];
-export declare type ScopeColorizer = (text: string, scopes: Scope) => string;
-export declare type LineColorizer = (text: string, tokens: Token[]) => string;
export interface ScopeColorizerDefinition {

@@ -9,0 +6,0 @@ colorMap: ColorMap;

dist/visualize/tokenizeToMd.d.ts

@@ -1,5 +0,5 @@

-import { Grammar, TokenizeLineResult, Scope } from '../grammar';
-export declare type ScopeColorizer = (text: string, scopes: Scope) => string;
+import { Grammar, TokenizeLineResult } from '../grammar';
+import { ScopeColorizer } from './types';
export declare function tokenizeLine(colorize: ScopeColorizer, tokenizedLine: TokenizeLineResult): IterableIterator<string>;
export declare function tokenizeText(grammar: Grammar, colorizer: ScopeColorizer, text: string): IterableIterator<string>;
export declare function tokenizeFile(grammar: Grammar, colorizer: ScopeColorizer, filename: string, encoding?: string): Promise<string>;

package.json

{
"name": "cspell-grammar",
"version": "0.1.0",
"version": "0.1.1",
"description": "A tmLanguage based Grammar Parser for cSpell",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"bin": {
"cspell-grammar": "./dist/app.js"
},
"files": [

@@ -30,5 +27,5 @@ "dist/**",

"test-watch": "npm run build && mocha --exit --require ts-node/register --watch --recursive \"src/**/*.test.ts\"",
"prepublish": "npm run lint && npm run clean-build && npm test",
"coverage-coveralls": "nyc report --reporter=text-lcov | coveralls",
"travis-coverage": "npm run generate-code-coverage && npm run coverage-coveralls",
"prepare": "npm run clean-build",
"prepublish-check": "npm run lint",
"coverage-report": "nyc report --reporter=text-lcov > ../../coverage/$npm_package_name.lcov",
"test-unit": "mocha --recursive \"dist/**/*.test.js\"",

@@ -54,2 +51,3 @@ "test": "npm run test-unit"

"@types/fs-extra": "^5.0.2",
"@types/js-yaml": "^3.11.1",
"@types/mocha": "^5.2.0",

@@ -59,4 +57,3 @@ "@types/node": "^8.10.12",

"chai": "^4.1.2",
"coveralls": "^3.0.1",
"lerna": "^2.11.0",
"cspell-grammar-syntax": "^0.1.1",
"mocha": "^5.1.1",

@@ -70,6 +67,6 @@ "nyc": "^11.7.1",

"dependencies": {
"chalk": "^2.4.1",
"commander": "^2.15.0",
"comment-json": "^1.1.3",
"fast-plist": "^0.1.2",
"fs-extra": "^6.0.0",
"js-yaml": "^3.11.0",
"xregexp": "^4.1.1"

@@ -76,0 +73,0 @@ },

src/grammar/grammar.ts

@@ -1,5 +0,8 @@

-import { GrammarDefinition } from './grammarDefinition';
+import { GrammarDefinition, Pattern, PatternInclude } from './grammarDefinition';
import { Token, tokenizeLine, grammarToRule } from './tokenize';
import * as fs from 'fs-extra';
import { isPatternInclude, isPatternPatterns } from './pattern';
export { Token, tokenizeLine } from './tokenize';
export interface Tokenizer {

@@ -19,4 +22,12 @@ tokenize(line: string): Token[];

export type ScopeResolver = (scopeNameRef: string) => Pattern | undefined;
export class Grammar {
-constructor(private grammarDef: GrammarDefinition) {}
+constructor(private grammarDef: GrammarDefinition) {
// fixup the repository to include $self and $base and ensure it exists.
const base = { $self: grammarDef, $base: grammarDef };
const repository = grammarDef.repository || base;
Object.assign(repository, base);
grammarDef.repository = repository;
}

@@ -45,2 +56,61 @@ tokenizer(): Tokenizer {

get grammar(): GrammarDefinition {
return this.grammarDef;
}
/**
*
* @param scopeNameRef the name of the scope to look up in the repository. It is of the form
* `scope.ext#reference`
*/
getReferencePattern(scopeNameRef: string): Pattern | undefined {
const [scope, reference = '$self'] = scopeNameRef.split('#', 2);
if (scope !== this.grammarDef.scopeName) {
return undefined;
}
return this.getPattern(reference);
}
private getPattern(ref: string): Pattern | undefined {
const pattern = this.grammarDef.repository[ref];
return pattern || undefined;
}
resolveImports(resolver: ScopeResolver) {
const self = this;
function internalResolve(scopeNameRef: string): Pattern | undefined {
const [scope, reference = ''] = scopeNameRef.split('#', 2);
return (!scope || scope === '$self' || scope === '$base' )
? self.getPattern(reference || scope)
: resolver(scopeNameRef);
}
function resolvePatternInclude(pat: PatternInclude) {
if (!pat._reference) {
const refMaybe: Pattern | undefined = internalResolve(pat.include);
if (!refMaybe) {
console.log(`Cannot resolve reference: (${self.grammarDef.scopeName}:${pat.include})`);
}
const ref: Pattern = refMaybe || { name: pat.include };
pat._reference = ref;
}
}
function resolvePatterns(pat: Pattern) {
if (isPatternInclude(pat)) {
resolvePatternInclude(pat);
} else if (isPatternPatterns(pat)) {
(pat.patterns || []).forEach(p => resolvePatterns(p));
}
}
const repository = this.grammarDef.repository;
resolvePatterns(this.grammarDef);
Object.keys(repository)
.map(key => repository[key])
.forEach(resolvePatterns);
}
static async createFromFile(filename: string): Promise<Grammar> {

@@ -47,0 +117,0 @@ const json = await fs.readFile(filename, 'utf8');

src/grammar/grammarDefinition.ts

@@ -10,3 +10,3 @@ /**

patterns: Pattern[]; // Patterns to use for the grammar
-repository?: Repository; // Dictionary of patterns
+repository: Repository; // Dictionary of patterns
}

@@ -44,2 +44,3 @@

comment?: string; // comment to help with understanding
+_reference?: Pattern; // resolved include reference. Not persisted in grammar files.
}

@@ -71,2 +72,6 @@

[index: string]: Pattern;
// Special repository patterns
// These are not persisted.
// $self?: GrammarDefinition;
// $base?: GrammarDefinition;
}

@@ -73,0 +78,0 @@

src/grammar/index.ts

export * from './grammarDefinition';
export * from './grammar';
export * from './tokenizeFile';
export * from './registry';

src/grammar/pattern.ts

@@ -66,1 +66,18 @@ import {

}
export function patternToString(pattern: PatternMatch | PatternBeginEnd | PatternInclude | PatternPatterns | PatternName): string {
if (isPatternMatch(pattern)) {
return `PatternMatch: ${pattern.name || '?'} (${pattern.match.toString()})`;
}
if (isPatternBeginEnd(pattern)) {
return `PatternBeginEnd: ${pattern.name || '?'} (${pattern.begin.toString()})`;
}
if (isPatternPatterns(pattern)) {
return `PatternPatterns: ${pattern.name || '?'} [${pattern.patterns.length}]`;
}
if (isPatternInclude(pattern)) {
return `PatternInclude: ${pattern.name || '?'} (${pattern.include})`;
}
// pattern name
return `PatternName: ${pattern.name || '?'}`;
}

src/grammar/registry.ts

@@ -1,7 +0,36 @@

// import { GrammarDefinition } from './grammarDefinition';
import { Grammar } from './grammar';
import { loadGrammar } from './grammarFiles';
// export class Registry {
// private registry: Map<string, GrammarDefinition> = new Map();
export class Registry {
private scopeMap = new Map<string, Grammar>();
private fileTypeMap = new Map<string, Grammar>();
// constructor(private resolveScope: (scopeName: string) => Promise<GrammarDefinition>) {}
// }
constructor(private grammars: Grammar[]) {
this.grammars.forEach(g => this.scopeMap.set(g.grammar.scopeName, g));
this.grammars.forEach(g => this.fileTypeMap.set(g.grammar.scopeName.replace(/^.*\./, ''), g));
this.grammars.forEach(g => (g.grammar.fileTypes || [])
.map(t => this.normalizeFileType(t))
.forEach(t => this.fileTypeMap.set(t, g)));
this.grammars.forEach(g => g.resolveImports(scopeNameRef => {
const [scope] = scopeNameRef.split('#', 2);
const refGrammar = this.scopeMap.get(scope);
return refGrammar && refGrammar.getReferencePattern(scopeNameRef);
}));
}
public normalizeFileType(fileType: string) {
return fileType.replace(/^\./, '');
}
public getGrammarForFileType(fileType: string) {
return this.fileTypeMap.get(this.normalizeFileType(fileType));
}
static async create(grammarFileNames: string[]): Promise<Registry> {
const pGrammars = grammarFileNames
.map(loadGrammar)
.map(async def => new Grammar(await def));
const grammars = await Promise.all(pGrammars);
return new Registry(grammars);
}
}

src/grammar/tokenize.ts

import { GrammarDefinition, Pattern, RegexOrString, Capture, PatternName } from './grammarDefinition';
-import { isPatternInclude, isPatternMatch, isPatternBeginEnd, scope, captures, endCaptures, isPatternName } from './pattern';
+import { isPatternInclude, isPatternMatch, isPatternBeginEnd, scope, captures, endCaptures, isPatternName, patternToString } from './pattern';
import * as XRegExp from 'xregexp';

@@ -8,3 +8,8 @@ import { escapeMatch, MatchOffsetResult, matchesToOffsets } from './regexpUtil';

const maxDepth = 100;
const useLogging = false;
function logInfo(message: string) {
useLogging && console.log(message);
}
export interface TokenizeLineResult {

@@ -119,2 +124,17 @@ tokens: Token[];

export function matchRule(text: string, offset: number, rule: Rule): MatchResult {
let result: MatchResult | undefined;
try {
logInfo(`${'.'.repeat(rule.depth)}+${rule.depth} ${patternToString(rule.pattern)} test`);
result = matchRuleInner(text, offset, rule);
return result;
} finally {
const msg = result
? (result.match ? `match ${result.match.length}` : 'non-match')
: 'failed';
logInfo(`${'.'.repeat(rule.depth)}+${rule.depth} ${patternToString(rule.pattern)} result ${msg}`);
}
}
function matchRuleInner(text: string, offset: number, rule: Rule): MatchResult {
const { pattern, depth, grammarDef } = rule;

@@ -124,3 +144,3 @@ if ( isPatternInclude(pattern) ) {

const name = pattern.include.slice(1);
-const result = grammarDef.repository[name];
+const result = pattern._reference || grammarDef.repository[name];
if (result) {

@@ -140,2 +160,3 @@ return matchRule(text, offset, {

// We do not support including other grammars yet.
console.log(`Unknown include: (${name})`);
}

@@ -142,0 +163,0 @@ return { rule }; // Unsupported include, match nothing.

src/index.ts

export * from './grammar';
export * from './visualize';

src/visualize/index.ts

@@ -0,1 +1,5 @@

export * from './types';
export * from './tokenColorizer';
export * from './tokenizeToMd';
import * as TokenizeToAnsi from './tokenizeToAnsi';
export const tokenizeToAnsi = TokenizeToAnsi;

src/visualize/tokenColorizer.ts

import chalk, { Chalk } from 'chalk';
import { create } from '../util/cacheMap';
-import { Scope } from '../grammar';
-import { Token } from '../grammar/tokenize';
+import { Scope, Token } from '../grammar';
+import { ScopeColorizer, LineColorizer } from './types';

@@ -10,5 +10,2 @@ export type ColorTextFn = (text: string) => string;

-export type ScopeColorizer = (text: string, scopes: Scope) => string;
-export type LineColorizer = (text: string, tokens: Token[]) => string;
export interface ScopeColorizerDefinition {

@@ -15,0 +12,0 @@ colorMap: ColorMap;

src/visualize/tokenizeToMd.ts

@@ -1,2 +0,2 @@

-import { Grammar, TokenizeLineResult, Scope } from '../grammar';
+import { Grammar, TokenizeLineResult } from '../grammar';
import * as fs from 'fs-extra';

@@ -9,3 +9,3 @@ import * as path from 'path';

-export type ScopeColorizer = (text: string, scopes: Scope) => string;
+import { ScopeColorizer } from './types';

@@ -12,0 +12,0 @@ function *splitLine(width: number, text: string): IterableIterator<string> {
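
Net effect of the visualize changes: ScopeColorizer and LineColorizer were previously declared independently in tokenColorizer and tokenizeToMd; both now come from the shared visualize/types module, so the two signatures can no longer drift apart.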

Sorry, the diffs of the eight remaining files are not supported yet.
