Comparing antlr4-c3 version 1.1.12 to 1.1.13

.vscode/settings.json

{
    "spellright.language": "en",
    "spellright.language": [
        "en"
    ],
    "spellright.documentTypes": [
@@ -8,5 +10,4 @@ "markdown",
        "antlr",
        "typescript",
        "javascript"
    ]
}

contributors.txt

@@ -55,1 +55,5 @@ ANTLR4-C3 Project Contributors Certification of Origin and Rights
2019/04/06, kaidjohnson, Kai Johnson, kaidjohnson@gmail.com
2019/06/26, bbourbie, Benoît Bourbié, bbourbie@splunk.com
2019/08/11, jphilipps, Jonathan Philipps, jphilipps@mail.de
2020/02/10, kyle-painter, Kyle Painter, kpainter101@gmail.com
2020/08/06, tamcgoey, Thomas McGoey-Smith, thomas@sourdough.dev

out/index.js

'use strict';
function __export(m) {
    for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__export(require("./src/CodeCompletionCore"));
__export(require("./src/SymbolTable"));
__exportStar(require("./src/CodeCompletionCore"), exports);
__exportStar(require("./src/SymbolTable"), exports);
//# sourceMappingURL=index.js.map
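
The churn in this file is pure compiler output: `__export` is the helper TypeScript 3.4 emitted for `export *` re-exports, while the `__createBinding`/`__exportStar` pair is what TypeScript 3.9 emits for the same construct (the package.json diff below shows the ^3.4.4 to ^3.9.7 upgrade). Both versions presumably compile from the same two-line source, sketched here:

```typescript
// index.ts (a reconstruction; only the compiled emit ships in the package)
export * from "./src/CodeCompletionCore";
export * from "./src/SymbolTable";
```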

out/src/CodeCompletionCore.d.ts

import { Parser, ParserRuleContext } from 'antlr4ts';
export declare type TokenList = number[];
export declare type CandidateRule = {
    startTokenIndex: number;
    ruleList: RuleList;
};
export declare type RuleWithStartToken = {
    startTokenIndex: number;
    ruleIndex: number;
};
export declare type RuleWithStartTokenList = RuleWithStartToken[];
export declare type RuleList = number[];
export declare class CandidatesCollection {
    tokens: Map<number, TokenList>;
    rules: Map<number, RuleList>;
    rules: Map<number, CandidateRule>;
}
@@ -15,2 +24,3 @@ export declare class CodeCompletionCore {
    preferredRules: Set<number>;
    translateRulesTopDown: boolean;
    private parser;
@@ -30,2 +40,3 @@ private atn;
    private checkPredicate;
    private translateStackToRuleIndex;
    private translateToRuleIndex;
@@ -32,0 +43,0 @@ private getFollowingTokens;

out/src/CodeCompletionCore.js

'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.CodeCompletionCore = exports.CandidatesCollection = void 0;
const antlr4ts_1 = require("antlr4ts");
const atn_1 = require("antlr4ts/atn");
const misc_1 = require("antlr4ts/misc");
const IntervalSet_1 = require("antlr4ts/misc/IntervalSet");
class CandidatesCollection {
@@ -33,2 +34,3 @@ constructor() {
        this.showRuleStack = false;
        this.translateRulesTopDown = false;
        this.tokenStartIndex = 0;
@@ -74,3 +76,3 @@ this.statesProcessed = 0;
            let token = tokenStream.LT(offset++);
            this.tokens.push(token.type);
            this.tokens.push(token);
            if (token.tokenIndex >= caretTokenIndex || token.type == antlr4ts_1.Token.EOF)
@@ -88,3 +90,3 @@ break;
            let path = "";
            for (let token of rule[1]) {
            for (let token of rule[1].ruleList) {
                path += this.ruleNames[token] + " ";
@@ -112,23 +114,17 @@ }
    }
    translateToRuleIndex(ruleStack) {
    translateStackToRuleIndex(ruleWithStartTokenList) {
        if (this.preferredRules.size == 0)
            return false;
        for (let i = 0; i < ruleStack.length; ++i) {
            if (this.preferredRules.has(ruleStack[i])) {
                let path = ruleStack.slice(0, i);
                let addNew = true;
                for (let rule of this.candidates.rules) {
                    if (rule[0] != ruleStack[i] || rule[1].length != path.length)
                        continue;
                    if (path.every((v, j) => v === rule[1][j])) {
                        addNew = false;
                        break;
                    }
        if (this.translateRulesTopDown) {
            for (let i = ruleWithStartTokenList.length - 1; i >= 0; i--) {
                if (this.translateToRuleIndex(i, ruleWithStartTokenList)) {
                    return true;
                }
                if (addNew) {
                    this.candidates.rules.set(ruleStack[i], path);
                    if (this.showDebugOutput)
                        console.log("=====> collected: ", this.ruleNames[i]);
                }
            }
        }
        else {
            for (let i = 0; i < ruleWithStartTokenList.length; i++) {
                if (this.translateToRuleIndex(i, ruleWithStartTokenList)) {
                    return true;
                }
                return true;
            }
@@ -138,2 +134,27 @@ }
    }
    translateToRuleIndex(i, ruleWithStartTokenList) {
        const { ruleIndex, startTokenIndex } = ruleWithStartTokenList[i];
        if (this.preferredRules.has(ruleIndex)) {
            let path = ruleWithStartTokenList.slice(0, i).map(({ ruleIndex }) => ruleIndex);
            let addNew = true;
            for (let rule of this.candidates.rules) {
                if (rule[0] != ruleIndex || rule[1].ruleList.length != path.length)
                    continue;
                if (path.every((v, j) => v === rule[1].ruleList[j])) {
                    addNew = false;
                    break;
                }
            }
            if (addNew) {
                this.candidates.rules.set(ruleIndex, {
                    startTokenIndex,
                    ruleList: path,
                });
                if (this.showDebugOutput)
                    console.log("=====> collected: ", this.ruleNames[ruleIndex]);
            }
            return true;
        }
        return false;
    }
    getFollowingTokens(transition) {
@@ -175,3 +196,3 @@ let result = [];
        let set = new FollowSetWithPath();
        set.intervals = misc_1.IntervalSet.of(antlr4ts_1.Token.EPSILON);
        set.intervals = IntervalSet_1.IntervalSet.of(antlr4ts_1.Token.EPSILON);
        set.path = ruleStack.slice();
@@ -199,3 +220,3 @@ followSets.push(set);
        let set = new FollowSetWithPath();
        set.intervals = misc_1.IntervalSet.of(antlr4ts_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType);
        set.intervals = IntervalSet_1.IntervalSet.of(antlr4ts_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType);
        set.path = ruleStack.slice();
@@ -208,3 +229,3 @@ followSets.push(set);
        if (transition.serializationType == 8) {
            label = label.complement(misc_1.IntervalSet.of(antlr4ts_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType));
            label = label.complement(IntervalSet_1.IntervalSet.of(antlr4ts_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType));
        }
@@ -220,3 +241,3 @@ let set = new FollowSetWithPath();
    }
    processRule(startState, tokenIndex, callStack, precedence, indentation) {
    processRule(startState, tokenListIndex, callStack, precedence, indentation) {
        let positionMap = this.shortcutMap.get(startState.ruleIndex);
@@ -228,7 +249,7 @@ if (!positionMap) {
        else {
            if (positionMap.has(tokenIndex)) {
            if (positionMap.has(tokenListIndex)) {
                if (this.showDebugOutput) {
                    console.log("=====> shortcut");
                }
                return positionMap.get(tokenIndex);
                return positionMap.get(tokenListIndex);
            }
@@ -248,3 +269,3 @@ }
            followSets.sets = this.determineFollowSets(startState, stop);
            let combined = new misc_1.IntervalSet();
            let combined = new IntervalSet_1.IntervalSet();
            for (let set of followSets.sets)
@@ -254,6 +275,10 @@ combined.addAll(set.intervals);
        }
        callStack.push(startState.ruleIndex);
        if (tokenIndex >= this.tokens.length - 1) {
        const startTokenIndex = this.tokens[tokenListIndex].tokenIndex;
        callStack.push({
            startTokenIndex,
            ruleIndex: startState.ruleIndex,
        });
        if (tokenListIndex >= this.tokens.length - 1) {
            if (this.preferredRules.has(startState.ruleIndex)) {
                this.translateToRuleIndex(callStack);
                this.translateStackToRuleIndex(callStack);
            }
@@ -263,4 +288,8 @@ else {
                let fullPath = callStack.slice();
                fullPath.push(...set.path);
                if (!this.translateToRuleIndex(fullPath)) {
                const followSetPath = set.path.map(path => ({
                    startTokenIndex,
                    ruleIndex: path,
                }));
                fullPath.push(...followSetPath);
                if (!this.translateStackToRuleIndex(fullPath)) {
                    for (let symbol of set.intervals.toArray())
@@ -285,3 +314,3 @@ if (!this.ignoredTokens.has(symbol)) {
        else {
            let currentSymbol = this.tokens[tokenIndex];
            let currentSymbol = this.tokens[tokenListIndex].type;
            if (!followSets.combined.contains(antlr4ts_1.Token.EPSILON) && !followSets.combined.contains(currentSymbol)) {
@@ -297,10 +326,10 @@ callStack.pop();
        let currentEntry;
        statePipeline.push({ state: startState, tokenIndex: tokenIndex });
        statePipeline.push({ state: startState, tokenListIndex: tokenListIndex });
        while (statePipeline.length > 0) {
            currentEntry = statePipeline.pop();
            ++this.statesProcessed;
            let currentSymbol = this.tokens[currentEntry.tokenIndex];
            let atCaret = currentEntry.tokenIndex >= this.tokens.length - 1;
            let currentSymbol = this.tokens[currentEntry.tokenListIndex].type;
            let atCaret = currentEntry.tokenListIndex >= this.tokens.length - 1;
            if (this.showDebugOutput) {
                this.printDescription(indentation, currentEntry.state, this.generateBaseDescription(currentEntry.state), currentEntry.tokenIndex);
                this.printDescription(indentation, currentEntry.state, this.generateBaseDescription(currentEntry.state), currentEntry.tokenListIndex);
                if (this.showRuleStack)
@@ -310,3 +339,3 @@ this.printRuleState(callStack);
            if (currentEntry.state.stateType == atn_1.ATNStateType.RULE_STOP) {
                result.add(currentEntry.tokenIndex);
                result.add(currentEntry.tokenListIndex);
                continue;
@@ -319,5 +348,5 @@ }
                    let ruleTransition = transition;
                    let endStatus = this.processRule(transition.target, currentEntry.tokenIndex, callStack, ruleTransition.precedence, indentation + 1);
                    let endStatus = this.processRule(transition.target, currentEntry.tokenListIndex, callStack, ruleTransition.precedence, indentation + 1);
                    for (let position of endStatus) {
                        statePipeline.push({ state: transition.followState, tokenIndex: position });
                        statePipeline.push({ state: transition.followState, tokenListIndex: position });
                    }
@@ -328,3 +357,3 @@ break;
                    if (this.checkPredicate(transition))
                        statePipeline.push({ state: transition.target, tokenIndex: currentEntry.tokenIndex });
                        statePipeline.push({ state: transition.target, tokenListIndex: currentEntry.tokenListIndex });
                    break;
@@ -335,3 +364,3 @@ }
                    if (predTransition.precedence >= this.precedenceStack[this.precedenceStack.length - 1])
                        statePipeline.push({ state: transition.target, tokenIndex: currentEntry.tokenIndex });
                        statePipeline.push({ state: transition.target, tokenListIndex: currentEntry.tokenListIndex });
                    break;
@@ -341,10 +370,12 @@ }
                    if (atCaret) {
                        if (!this.translateToRuleIndex(callStack)) {
                            for (let token of misc_1.IntervalSet.of(antlr4ts_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType).toArray())
                        if (!this.translateStackToRuleIndex(callStack)) {
                            for (let token of IntervalSet_1.IntervalSet.of(antlr4ts_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType)
                                .toArray()) {
                                if (!this.ignoredTokens.has(token))
                                    this.candidates.tokens.set(token, []);
                            }
                        }
                    }
                    else {
                        statePipeline.push({ state: transition.target, tokenIndex: currentEntry.tokenIndex + 1 });
                        statePipeline.push({ state: transition.target, tokenListIndex: currentEntry.tokenListIndex + 1 });
                    }
@@ -355,3 +386,3 @@ break;
                    if (transition.isEpsilon) {
                        statePipeline.push({ state: transition.target, tokenIndex: currentEntry.tokenIndex });
                        statePipeline.push({ state: transition.target, tokenListIndex: currentEntry.tokenListIndex });
                        continue;
@@ -362,6 +393,6 @@ }
                    if (transition.serializationType == 8) {
                        set = set.complement(misc_1.IntervalSet.of(antlr4ts_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType));
                        set = set.complement(IntervalSet_1.IntervalSet.of(antlr4ts_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType));
                    }
                    if (atCaret) {
                        if (!this.translateToRuleIndex(callStack)) {
                        if (!this.translateStackToRuleIndex(callStack)) {
                            let list = set.toArray();
@@ -384,3 +415,6 @@ let addFollowing = list.length == 1;
                                console.log("=====> consumed: ", this.vocabulary.getDisplayName(currentSymbol));
                            statePipeline.push({ state: transition.target, tokenIndex: currentEntry.tokenIndex + 1 });
                            statePipeline.push({
                                state: transition.target,
                                tokenListIndex: currentEntry.tokenListIndex + 1
                            });
                        }
@@ -397,3 +431,3 @@ }
        }
        positionMap.set(tokenIndex, result);
        positionMap.set(tokenListIndex, result);
        return result;
@@ -414,3 +448,4 @@ }
        if (symbols.length > 2) {
            labels = this.vocabulary.getDisplayName(symbols[0]) + " .. " + this.vocabulary.getDisplayName(symbols[symbols.length - 1]);
            labels = this.vocabulary.getDisplayName(symbols[0]) + " .. " +
                this.vocabulary.getDisplayName(symbols[symbols.length - 1]);
        }
@@ -426,4 +461,5 @@ else {
            labels = "ε";
        transitionDescription += "\n" + indent + "\t(" + labels + ") " + "[" + transition.target.stateNumber + " " +
            this.atnStateTypeMap[transition.target.stateType] + "] in " + this.ruleNames[transition.target.ruleIndex];
        transitionDescription += "\n" + indent + "\t(" + labels + ") " + "[" +
            transition.target.stateNumber + " " + this.atnStateTypeMap[transition.target.stateType] + "] in " +
            this.ruleNames[transition.target.ruleIndex];
        }
@@ -443,7 +479,7 @@ }
        for (let rule of stack)
            console.log(this.ruleNames[rule]);
            console.log(this.ruleNames[rule.ruleIndex]);
    }
}
exports.CodeCompletionCore = CodeCompletionCore;
CodeCompletionCore.followSetsByATN = new Map();
exports.CodeCompletionCore = CodeCompletionCore;
//# sourceMappingURL=CodeCompletionCore.js.map
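
The recurring rename in this file, `tokenIndex` to `tokenListIndex`, separates two positions the old code conflated: an index into the core's internal `this.tokens` list versus a token's absolute `tokenIndex` in the input stream. Because `this.tokens` now stores whole token objects rather than bare token types, the engine can record where each candidate rule started. A sketch of the resulting shapes (`RuleWithStartToken` and `CandidateRule` are declared in the .d.ts above; `PipelineEntry` is a hypothetical name for the anonymous objects pushed onto `statePipeline`):

```typescript
// Shapes introduced in 1.1.13, as declared in CodeCompletionCore.d.ts.
type RuleWithStartToken = {
    startTokenIndex: number; // absolute position in the token stream
    ruleIndex: number;       // parser rule entered at that position
};

type CandidateRule = {
    startTokenIndex: number; // where the candidate rule started matching
    ruleList: number[];      // rule call stack leading to the candidate
};

// Hypothetical name for the anonymous pipeline entries: tokenListIndex is
// an index into the token list collected up to the caret, not a stream
// position, which is why the parameter was renamed.
type PipelineEntry = { state: object; tokenListIndex: number };
```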

out/src/SymbolTable.d.ts

@@ -38,5 +38,5 @@ import { ParseTree } from 'antlr4ts/tree/ParseTree';
    name: string;
    readonly baseTypes: Type[];
    readonly kind: TypeKind;
    readonly reference: ReferenceKind;
    get baseTypes(): Type[];
    get kind(): TypeKind;
    get reference(): ReferenceKind;
    static readonly integerType: FundamentalType;
@@ -56,14 +56,14 @@ static readonly floatType: FundamentalType;
    setParent(parent: Symbol | undefined): void;
    readonly parent: Symbol | undefined;
    readonly firstSibling: Symbol;
    readonly previousSibling: Symbol | undefined;
    readonly nextSibling: Symbol | undefined;
    readonly lastSibling: Symbol;
    readonly next: Symbol | undefined;
    get parent(): Symbol | undefined;
    get firstSibling(): Symbol;
    get previousSibling(): Symbol | undefined;
    get nextSibling(): Symbol | undefined;
    get lastSibling(): Symbol;
    get next(): Symbol | undefined;
    removeFromParent(): void;
    resolve(name: string, localOnly?: boolean): Symbol | undefined;
    readonly root: Symbol | undefined;
    readonly symbolTable: SymbolTable | undefined;
    get root(): Symbol | undefined;
    get symbolTable(): SymbolTable | undefined;
    getParentOfType<T extends Symbol>(t: new (...args: any[]) => T): T | undefined;
    readonly symbolPath: Symbol[];
    get symbolPath(): Symbol[];
    qualifiedName(separator?: string, full?: boolean, includeAnonymous?: boolean): string;
@@ -77,5 +77,5 @@ protected _parent: Symbol | undefined;
export declare class TypeAlias extends Symbol implements Type {
    readonly baseTypes: Type[];
    readonly kind: TypeKind;
    readonly reference: ReferenceKind;
    get baseTypes(): Type[];
    get kind(): TypeKind;
    get reference(): ReferenceKind;
    constructor(name: string, target: Type);
@@ -86,3 +86,3 @@ private targetType;
    constructor(name?: string);
    readonly children: Symbol[];
    get children(): Symbol[];
    clear(): void;
@@ -98,3 +98,3 @@ addSymbol(symbol: Symbol): void;
    getTypedSymbolNames(localOnly?: boolean): string[];
    readonly directScopes: ScopedSymbol[];
    get directScopes(): ScopedSymbol[];
    symbolFromPath(path: string, separator?: string): Symbol | undefined;
@@ -104,4 +104,4 @@ indexOfChild(child: Symbol): number;
    previousSiblingOf(child: Symbol): Symbol | undefined;
    readonly firstChild: Symbol | undefined;
    readonly lastChild: Symbol | undefined;
    get firstChild(): Symbol | undefined;
    get lastChild(): Symbol | undefined;
    nextOf(child: Symbol): Symbol | undefined;
@@ -150,5 +150,5 @@ private _children;
export declare class ClassSymbol extends ScopedSymbol implements Type {
    readonly baseTypes: Type[];
    readonly kind: TypeKind;
    readonly reference: ReferenceKind;
    get baseTypes(): Type[];
    get kind(): TypeKind;
    get reference(): ReferenceKind;
    isStruct: boolean;
@@ -162,5 +162,5 @@ readonly superClasses: ClassSymbol[];
export declare class ArrayType extends Symbol implements Type {
    readonly baseTypes: Type[];
    readonly kind: TypeKind;
    readonly reference: ReferenceKind;
    get baseTypes(): Type[];
    get kind(): TypeKind;
    get reference(): ReferenceKind;
    readonly elementType: Type;
@@ -177,3 +177,3 @@ readonly size: number;
    removeDependency(table: SymbolTable): void;
    readonly info: {
    get info(): {
        dependencyCount: number;
@@ -180,0 +180,0 @@ symbolCount: number;
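
The wholesale `readonly x: T` to `get x(): T` switch in this declaration file is likewise a compiler artifact rather than an API change: since TypeScript 3.7, declaration files can contain get/set accessors, and tsc emits them as such, where older compilers flattened a getter to a readonly property. The member reshuffling in SymbolTable.js below has the same cause. A self-contained illustration of the emit difference:

```typescript
// example.ts
export class Example {
    public get kind(): number { return 1; }
}

// tsc 3.4 --declaration produced:  readonly kind: number;
// tsc 3.9 --declaration produces:  get kind(): number;
```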

out/src/SymbolTable.js

'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.SymbolTable = exports.ArrayType = exports.ClassSymbol = exports.FieldSymbol = exports.MethodSymbol = exports.MethodFlags = exports.RoutineSymbol = exports.ParameterSymbol = exports.LiteralSymbol = exports.VariableSymbol = exports.BlockSymbol = exports.NamespaceSymbol = exports.ScopedSymbol = exports.TypeAlias = exports.TypedSymbol = exports.Symbol = exports.FundamentalType = exports.ReferenceKind = exports.TypeKind = exports.MemberVisibility = exports.DuplicateSymbolError = void 0;
class DuplicateSymbolError extends Error {
@@ -46,2 +47,3 @@ }
}
exports.FundamentalType = FundamentalType;
FundamentalType.integerType = new FundamentalType("int", TypeKind.Integer, ReferenceKind.Instance);
@@ -52,3 +54,2 @@ FundamentalType.floatType = new FundamentalType("float", TypeKind.Float, ReferenceKind.Instance);
FundamentalType.dateType = new FundamentalType("date", TypeKind.Date, ReferenceKind.Instance);
exports.FundamentalType = FundamentalType;
class Symbol {
@@ -179,5 +180,2 @@ constructor(name = "") {
class TypeAlias extends Symbol {
    get baseTypes() { return [this.targetType]; }
    get kind() { return TypeKind.Alias; }
    get reference() { return ReferenceKind.Irrelevant; }
    constructor(name, target) {
@@ -187,2 +185,5 @@ super(name);
    }
    get baseTypes() { return [this.targetType]; }
    get kind() { return TypeKind.Alias; }
    get reference() { return ReferenceKind.Irrelevant; }
}
@@ -469,6 +470,2 @@ exports.TypeAlias = TypeAlias;
class ArrayType extends Symbol {
    get baseTypes() { return []; }
    ;
    get kind() { return TypeKind.Array; }
    get reference() { return this.referenceKind; }
    constructor(name, referenceKind, elemType, size = 0) {
@@ -480,2 +477,6 @@ super(name);
    }
    get baseTypes() { return []; }
    ;
    get kind() { return TypeKind.Array; }
    get reference() { return this.referenceKind; }
}
@@ -482,0 +483,0 @@ exports.ArrayType = ArrayType;

package.json

{
    "name": "antlr4-c3",
    "version": "1.1.12",
    "description": "A code completion core implmentation for ANTLR4 based parsers",
    "version": "1.1.13",
    "description": "A code completion core implementation for ANTLR4 based parsers",
    "main": "out/index.js",
@@ -27,10 +27,10 @@ "typings": "out/index.d.ts",
    "devDependencies": {
        "@types/chai": "^4.1.7",
        "@types/mocha": "^5.2.6",
        "@types/node": "^10.14.4",
        "@types/chai": "^4.2.12",
        "@types/mocha": "^8.0.1",
        "@types/node": "^14.0.27",
        "antlr4ts-cli": "^0.5.0-alpha.3",
        "chai": ">=4.2.0",
        "mocha": "^6.1.4",
        "mocha": "^8.1.1",
        "path": ">=0.12.7",
        "typescript": "^3.4.4"
        "typescript": "^3.9.7"
    },
@@ -37,0 +37,0 @@ "author": "Mike Lischke",

README.md

@@ -15,3 +15,3 @@ [![NPM](https://nodei.co/npm/antlr4-c3.png?downloads=true&downloadRank=true)](https://nodei.co/npm/antlr4-c3/)
The c3 engine implementation is based on an idea presented a while ago under [Universal Code Completion using ANTLR3](http://www.soft-gems.net/index.php/tools/47-universal-code-completion-using-antlr3). There a grammar was loaded into a memory structure so that it can be walked through with the current input to find a specific location (usually the caret position) and then collect all possible tokens and special rules, which then describe the possible set of code completion candidates for that position. With ANTLR4 we no longer need to load a grammar, because the grammar structure is now available as part of a parser (via the ATN - [Augmented Transition Network](https://en.wikipedia.org/wiki/Augmented_transition_network)). The ANTLR4 runtime even provides the [LL1Analyzer](https://github.com/antlr/antlr4/blob/master/runtime/Java/src/org/antlr/v4/runtime/atn/LL1Analyzer.java) class, which helps with retrieving follow sets for a given ATN state, but has a few shortcomings and is in general not easy to use.
The c3 engine implementation is based on an idea presented a while ago under [Universal Code Completion using ANTLR3](https://soft-gems.net/universal-code-completion-using-antlr3/). There, a grammar was loaded into a memory structure that could be walked with the current input to find a specific location (usually the caret position) and then collect all possible tokens and special rules, which together describe the set of code completion candidates for that position. With ANTLR4 we no longer need to load a grammar, because the grammar structure is already available as part of a parser (via the ATN, the [Augmented Transition Network](https://en.wikipedia.org/wiki/Augmented_transition_network)). The ANTLR4 runtime even provides the [LL1Analyzer](https://github.com/antlr/antlr4/blob/master/runtime/Java/src/org/antlr/v4/runtime/atn/LL1Analyzer.java) class, which helps with retrieving follow sets for a given ATN state, but it has a few shortcomings and is in general not easy to use.
@@ -116,8 +116,12 @@ With the Code Completion Core implementation things become a lot easier. In the simplest setup you only give it a parser instance and a caret position and it will return the candidates for it. Still, a full code completion implementation requires some support code that we need to discuss first before we can come to the actual usage of the c3 engine.
    public tokens: Map<number, TokenList>;
    public rules: Map<number, RuleList>;
    public rules: Map<number, CandidateRule>;
};
```
where the map keys are the lexer tokens and the rule indices, respectively. Both can come with additional numbers, which you may or may not use for your implementation. For parser rules the list represents the call stack at which the given rule was found during evaluation. This allows to determine a context for rules that are used in different places. For the lexer tokens the list consists of further token ids which directly follow the given token in the grammar (if any). This allows you to show **token sequences** if they are always used together. For example consider this SQL rule:
where the map keys are the lexer tokens and the rule indices, respectively. Both can come with additional values, which you may or may not use in your implementation.
For parser rules the value includes a `startTokenIndex`, which reflects the index of the starting token within the evaluated rule. This lets consumers determine the range of tokens that should be replaced or matched against when resolving symbols for the rule. The value also contains a rule list, which represents the call stack at which the given rule was found during evaluation, so consumers can determine a context for rules that are used in different places.
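
A short sketch of how a consumer might read the new shape (`MyLexer`, `MyParser`, the `query` entry rule and `RULE_tableRef` are placeholders for your own generated parser, not part of the library):

```typescript
import { ANTLRInputStream, CommonTokenStream } from "antlr4ts";
import { CodeCompletionCore } from "antlr4-c3";
import { MyLexer } from "./MyLexer";   // hypothetical generated lexer
import { MyParser } from "./MyParser"; // hypothetical generated parser

const lexer = new MyLexer(new ANTLRInputStream("SELECT * FROM "));
const parser = new MyParser(new CommonTokenStream(lexer));
parser.query(); // parse once with your entry rule so the token stream is filled

const core = new CodeCompletionCore(parser);
core.preferredRules = new Set([MyParser.RULE_tableRef]);

const caretTokenIndex = 3; // example value: the token under the caret
const candidates = core.collectCandidates(caretTokenIndex);

for (const [ruleIndex, candidate] of candidates.rules) {
    // New in 1.1.13: each entry carries the start token index and the
    // rule call stack instead of being a bare list of rule indices.
    console.log(ruleIndex, candidate.startTokenIndex, candidate.ruleList);
}
```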
For the lexer tokens the list consists of further token IDs that directly follow the given token in the grammar (if any). This allows you to show **token sequences** if they are always used together. For example, consider this SQL rule:
```antlr
@@ -214,2 +218,7 @@ createTable: CREATE TABLE (IF NOT EXISTS)? ...;
### 1.1.13
- Added a C# port of the library (thanks to Jonathan Philipps).
- The rule stack can now optionally be walked top-down instead of bottom-up when matching a preferred rule, which changes how preference is given when multiple preferred rules appear in a single stack (see the sketch below).
- Rule candidates now include the start token index at which they matched.
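
For instance, opting into the new top-down walk looks like this (a sketch; `parser` stands for any antlr4ts parser instance):

```typescript
import { Parser } from "antlr4ts";
import { CodeCompletionCore } from "antlr4-c3";

declare const parser: Parser; // any antlr4ts parser instance

const core = new CodeCompletionCore(parser);
core.preferredRules = new Set([/* preferred rule indices */]);

// Judging from translateStackToRuleIndex above: with true, the stack is
// checked from its top (innermost rule) first; with false (the default),
// it is checked from its bottom (outermost rule) first, as before.
core.translateRulesTopDown = true;
```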
### 1.1.12
@@ -216,0 +225,0 @@ - Updated modules with known vulnerabilities.