langium-cli - npm package version comparison

Comparing version 0.2.0 to 0.3.0-next.5e9d27d


langium-config-schema.json (new schema as of 0.3.0-next.5e9d27d)
{
  "$schema": "https://json-schema.org/draft/2020-12/schema",
  "$defs": {
    "chevrotainParserConfig": {
      "description": "An object to describe the Chevrotain parser configuration",
      "type": "object",
      "properties": {
        "recoveryEnabled": {
          "description": "Is the error recovery / fault tolerance of the Chevrotain Parser enabled",
          "type": "boolean"
        },
        "maxLookahead": {
          "description": "Maximum number of tokens the parser will use to choose between alternatives",
          "type": "number"
        },
        "dynamicTokensEnabled": {
          "description": "A flag to support Dynamically defined Tokens",
          "type": "boolean"
        },
        "nodeLocationTracking": {
          "description": "Enable computation of CST nodes location",
          "type": "string"
        },
        "traceInitPerf": {
          "description": "A flag to print performance tracing logs during parser initialization",
          "type": ["boolean", "number"]
        },
        "skipValidations": {
          "description": "A flag to avoid running the grammar validations during Parser initialization",
          "type": "boolean"
        }
      }
    },
    "config": {
      "type": "object",
      "description": "A langium cli configuration",
      "additionalProperties": false,
      "properties": {
        "projectName": {
          "description": "The name of your Langium project",
          "type": "string"
        },
        "languages": {
          "description": "Your language configurations",
          "type": "array",
          "items": [
            {
              "type": "object",
              "description": "A single language configuration",
              "additionalProperties": false,
              "properties": {
                "id": {
                  "description": "The Id used to refer to the DSL",
                  "type": "string"
                },
                "grammar": {
                  "description": "The relative file path to the .langium grammar file",
                  "type": "string"
                },
                "fileExtensions": {
                  "description": "The file extensions used by the DSL",
                  "type": "array",
                  "items": [
                    {
                      "type": "string"
                    }
                  ]
                },
                "caseInsensitive": {
                  "description": "Enable case-insensitive keywords parsing",
                  "type": "boolean"
                },
                "textMate": {
                  "description": "An object to describe the textMate grammar properties",
                  "type": "object",
                  "properties": {
                    "out": {
                      "description": "The output file path to the generated textMate grammar",
                      "type": "string"
                    }
                  },
                  "required": [
                    "out"
                  ]
                },
                "chevrotainParserConfig": {
                  "$ref": "#/$defs/chevrotainParserConfig"
                }
              },
              "required": [
                "id",
                "grammar",
                "fileExtensions"
              ]
            }
          ]
        },
        "out": {
          "description": "The relative directory to the code generated by the langium-cli",
          "type": "string"
        },
        "chevrotainParserConfig": {
          "$ref": "#/$defs/chevrotainParserConfig"
        },
        "langiumInternal": {
          "description": "A flag to determine whether langium uses itself to bootstrap",
          "type": "boolean"
        }
      },
      "required": [
        "languages",
        "projectName",
        "out"
      ]
    }
  },
  "anyOf": [
    {
      "$ref": "#/$defs/config"
    },
    {
      "type": "array",
      "description": "An array of langium configurations.",
      "items": {
        "$ref": "#/$defs/config"
      }
    }
  ]
}
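
The schema above defines the new multi-language configuration format introduced in this version. A minimal langium-config.json that validates against it could look like the following (project name, paths, and file extension are illustrative examples, not taken from this package):

{
  "projectName": "Statemachine",
  "languages": [
    {
      "id": "statemachine",
      "grammar": "src/statemachine.langium",
      "fileExtensions": [".statemachine"],
      "textMate": {
        "out": "syntaxes/statemachine.tmLanguage.json"
      }
    }
  ],
  "out": "src/generated"
}

Per the required lists, projectName, languages, and out must be present, every language entry needs id, grammar, and fileExtensions, and the top-level anyOf also accepts an array of such configurations.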

@@ -12,3 +12,3 @@ /******************************************************************************

export declare type GeneratorResult = 'success' | 'failure';
export declare function generate(config: LangiumConfig): Promise<GeneratorResult>;
export declare function generate(config: LangiumConfig, options: GenerateOptions): Promise<GeneratorResult>;
//# sourceMappingURL=generate.d.ts.map
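
The generate entry point now takes a GenerateOptions argument in addition to the configuration. A hedged sketch of a programmatic invocation follows; the deep import paths and the GenerateOptions shape ({ file, watch }) are inferred from the option accesses later in this diff, not from a documented public API:

// Sketch only: module paths and option fields are assumptions based on this diff.
import { generate } from 'langium-cli/lib/generate';
import { loadConfigs } from 'langium-cli/lib/package';

async function runGenerator(): Promise<void> {
    const options = { file: './langium-config.json', watch: false };
    // loadConfigs is now async and returns every configuration found in the file.
    const configs = await loadConfigs(options);
    for (const config of configs) {
        const result = await generate(config, options); // 'success' | 'failure'
        if (result === 'failure') {
            process.exitCode = 1;
        }
    }
}

runGenerator();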

@@ -22,73 +22,163 @@ "use strict";

const fs_extra_1 = __importDefault(require("fs-extra"));
const langium_1 = require("langium");
const path_1 = __importDefault(require("path"));
const package_1 = require("./package");
const langium_1 = require("langium");
const vscode_uri_1 = require("vscode-uri");
const ast_generator_1 = require("./generator/ast-generator");
const grammar_serializer_1 = require("./generator/grammar-serializer");
const module_generator_1 = require("./generator/module-generator");
const textmate_generator_1 = require("./generator/textmate-generator");
const grammar_serializer_1 = require("./generator/grammar-serializer");
const util_1 = require("./generator/util");
const package_1 = require("./package");
const parser_validation_1 = require("./parser-validation");
const services = (0, langium_1.createLangiumGrammarServices)();
function generate(config) {
const { shared: sharedServices, grammar: grammarServices } = (0, langium_1.createLangiumGrammarServices)();
const documents = sharedServices.workspace.LangiumDocuments;
function eagerLoad(document, uris = new Set()) {
const uriString = document.uri.toString();
if (!uris.has(uriString)) {
uris.add(uriString);
const grammar = document.parseResult.value;
if ((0, langium_1.isGrammar)(grammar)) {
for (const imp of grammar.imports) {
const importedGrammar = (0, langium_1.resolveImport)(documents, imp);
if (importedGrammar) {
const importedDoc = (0, langium_1.getDocument)(importedGrammar);
eagerLoad(importedDoc, uris);
}
}
}
}
return Array.from(uris).map(e => vscode_uri_1.URI.parse(e));
}
/**
* Creates a map that contains all rules of all grammars.
* This includes both input grammars and their transitive dependencies.
*/
function mapRules(grammars, visited = new Set(), map = new Map()) {
for (const grammar of grammars) {
const doc = (0, langium_1.getDocument)(grammar);
const uriString = doc.uri.toString();
if (!visited.has(uriString)) {
visited.add(uriString);
map.set(grammar, grammar.rules.map(e => {
// Create a new array of rules and copy all rules
// Also deactivate all entry rules
const shallowCopy = Object.assign({}, e);
if ((0, langium_1.isParserRule)(shallowCopy)) {
shallowCopy.entry = false;
}
return shallowCopy;
}));
const importedGrammars = grammar.imports.map(e => (0, langium_1.resolveImport)(documents, e));
mapRules(importedGrammars, visited, map);
}
}
return map;
}
function embedReferencedRules(grammar, map) {
const allGrammars = (0, langium_1.resolveTransitiveImports)(documents, grammar);
for (const importedGrammar of allGrammars) {
const rules = map.get(importedGrammar);
if (rules) {
grammar.rules.push(...rules);
}
}
}
function buildAll(config) {
return __awaiter(this, void 0, void 0, function* () {
for (const doc of documents.all) {
documents.invalidateDocument(doc.uri);
}
const map = new Map();
const relPath = config[package_1.RelativePath];
for (const languageConfig of config.languages) {
const absGrammarPath = vscode_uri_1.URI.file(path_1.default.resolve(relPath, languageConfig.grammar));
const document = documents.getOrCreateDocument(absGrammarPath);
const allUris = eagerLoad(document);
yield sharedServices.workspace.DocumentBuilder.update(allUris, []);
}
for (const doc of documents.all) {
const buildResult = yield sharedServices.workspace.DocumentBuilder.build(doc);
map.set(doc.uri.fsPath, buildResult);
}
return map;
});
}
function generate(config, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
// Load, parse and validate the grammar
const relPath = config[package_1.RelativePath];
const absGrammarPath = vscode_uri_1.URI.file(path_1.default.resolve(relPath, config.grammar));
services.documents.LangiumDocuments.invalidateDocument(absGrammarPath);
const document = services.documents.LangiumDocuments.getOrCreateDocument(absGrammarPath);
const buildResult = yield services.documents.DocumentBuilder.build(document);
const diagnostics = buildResult.diagnostics;
if (!(0, langium_1.isGrammar)(document.parseResult.value)) {
console.error((0, util_1.getTime)() + 'Failed to parse the grammar file: ' + config.grammar);
if (!config.languages || config.languages.length === 0) {
(0, util_1.log)('error', options, 'No languages specified in config.');
return 'failure';
}
else if ((diagnostics === null || diagnostics === void 0 ? void 0 : diagnostics.length) && diagnostics.some(e => e.severity === 1)) {
console.error((0, util_1.getTime)() + 'Grammar contains validation errors:');
diagnostics.forEach(e => {
const message = `${e.range.start.line}:${e.range.start.character} - ${e.message}`;
if (e.severity === 1) {
console.error(message.red);
const all = yield buildAll(config);
let hasErrors = false;
for (const [path, buildResult] of all) {
const diagnostics = buildResult.diagnostics;
for (const diagnostic of diagnostics) {
const message = `${vscode_uri_1.Utils.basename(vscode_uri_1.URI.file(path))}:${diagnostic.range.start.line + 1}:${diagnostic.range.start.character + 1} - ${diagnostic.message}`;
if (diagnostic.severity === 1) {
(0, util_1.log)('error', options, message.red);
}
else if (e.severity === 2) {
console.warn(message.yellow);
else if (diagnostic.severity === 2) {
(0, util_1.log)('warn', options, message.yellow);
}
else {
console.log(message);
(0, util_1.log)('log', options, message);
}
});
console.error(`${(0, util_1.getTime)()}Langium generator ${'failed'.red.bold}.`);
return 'failure';
}
if (!hasErrors) {
hasErrors = diagnostics.length > 0 && diagnostics.some(e => e.severity === 1);
}
}
const grammar = document.parseResult.value;
// Create and validate the in-memory parser
const parserAnalysis = (0, parser_validation_1.validateParser)(grammar, config);
if (parserAnalysis instanceof Error) {
console.error(parserAnalysis.toString().red);
if (hasErrors) {
(0, util_1.log)('error', options, `Langium generator ${'failed'.red.bold}.`);
return 'failure';
}
const grammars = [];
const configMap = new Map();
const relPath = config[package_1.RelativePath];
for (const languageConfig of config.languages) {
const absGrammarPath = vscode_uri_1.URI.file(path_1.default.resolve(relPath, languageConfig.grammar)).fsPath;
const buildResult = all.get(absGrammarPath);
if (buildResult) {
const grammar = buildResult.document.parseResult.value;
grammars.push(grammar);
configMap.set(grammar, languageConfig);
}
}
const ruleMap = mapRules(grammars);
for (const grammar of grammars) {
embedReferencedRules(grammar, ruleMap);
// Create and validate the in-memory parser
const parserAnalysis = (0, parser_validation_1.validateParser)(grammar, config);
if (parserAnalysis instanceof Error) {
(0, util_1.log)('error', options, parserAnalysis.toString().red);
return 'failure';
}
}
// Generate the output files
const output = path_1.default.resolve(relPath, (_a = config.out) !== null && _a !== void 0 ? _a : 'src/generated');
console.log(`${(0, util_1.getTime)()}Writing generated files to ${output.white.bold}`);
if (yield rmdirWithFail(output, ['ast.ts', 'grammar.ts', 'grammar-access.ts', 'parser.ts', 'module.ts'])) {
(0, util_1.log)('log', options, `Writing generated files to ${output.white.bold}`);
if (yield rmdirWithFail(output, ['ast.ts', 'grammar.ts', 'module.ts'], options)) {
return 'failure';
}
if (yield mkdirWithFail(output)) {
if (yield mkdirWithFail(output, options)) {
return 'failure';
}
const genAst = (0, ast_generator_1.generateAst)(grammar, config);
yield writeWithFail(path_1.default.resolve(output, 'ast.ts'), genAst);
const serializedGrammar = (0, grammar_serializer_1.serializeGrammar)(services, grammar, config);
yield writeWithFail(path_1.default.resolve(output, 'grammar.ts'), serializedGrammar);
const genModule = (0, module_generator_1.generateModule)(grammar, config);
yield writeWithFail(path_1.default.resolve(output, 'module.ts'), genModule);
if (config.textMate) {
const genTmGrammar = (0, textmate_generator_1.generateTextMate)(grammar, config);
const textMatePath = path_1.default.resolve(relPath, config.textMate.out);
console.log(`${(0, util_1.getTime)()}Writing textmate grammar to ${textMatePath.white.bold}`);
const parentDir = path_1.default.dirname(textMatePath).split(path_1.default.sep).pop();
parentDir && (yield mkdirWithFail(parentDir));
yield writeWithFail(textMatePath, genTmGrammar);
const genAst = (0, ast_generator_1.generateAst)(grammarServices, grammars, config);
yield writeWithFail(path_1.default.resolve(output, 'ast.ts'), genAst, options);
const serializedGrammar = (0, grammar_serializer_1.serializeGrammar)(grammarServices, grammars, config);
yield writeWithFail(path_1.default.resolve(output, 'grammar.ts'), serializedGrammar, options);
const genModule = (0, module_generator_1.generateModule)(grammars, config, configMap);
yield writeWithFail(path_1.default.resolve(output, 'module.ts'), genModule, options);
for (const grammar of grammars) {
const languageConfig = configMap.get(grammar);
if (languageConfig === null || languageConfig === void 0 ? void 0 : languageConfig.textMate) {
const genTmGrammar = (0, textmate_generator_1.generateTextMate)(grammar, languageConfig);
const textMatePath = path_1.default.resolve(relPath, languageConfig.textMate.out);
(0, util_1.log)('log', options, `Writing textmate grammar to ${textMatePath.white.bold}`);
const parentDir = path_1.default.dirname(textMatePath).split(path_1.default.sep).pop();
parentDir && (yield mkdirWithFail(parentDir, options));
yield writeWithFail(textMatePath, genTmGrammar, options);
}
}

@@ -99,3 +189,3 @@ return 'success';

exports.generate = generate;
function rmdirWithFail(dirPath, expectedFiles) {
function rmdirWithFail(dirPath, expectedFiles, options) {
return __awaiter(this, void 0, void 0, function* () {

@@ -106,9 +196,7 @@ try {

if (dirExists) {
if (expectedFiles) {
const existingFiles = yield fs_extra_1.default.readdir(dirPath);
const unexpectedFiles = existingFiles.filter(file => !expectedFiles.includes(path_1.default.basename(file)));
if (unexpectedFiles.length > 0) {
console.log(`${(0, util_1.getTime)()}Found unexpected files in the generated directory: ${unexpectedFiles.map(e => e.yellow).join(', ')}`);
deleteDir = (yield (0, util_1.getUserChoice)(`${(0, util_1.getTime)()}Do you want to delete the files?`, ['yes', 'no'], 'yes')) === 'yes';
}
const existingFiles = yield fs_extra_1.default.readdir(dirPath);
const unexpectedFiles = existingFiles.filter(file => !expectedFiles.includes(path_1.default.basename(file)));
if (unexpectedFiles.length > 0) {
(0, util_1.log)('log', options, `Found unexpected files in the generated directory: ${unexpectedFiles.map(e => e.yellow).join(', ')}`);
deleteDir = (yield (0, util_1.getUserChoice)('Do you want to delete the files?', ['yes', 'no'], 'yes')) === 'yes';
}

@@ -122,3 +210,3 @@ if (deleteDir) {

catch (e) {
console.error(`${(0, util_1.getTime)()}Failed to delete directory ${dirPath.red.bold}`, e);
(0, util_1.log)('error', options, `Failed to delete directory ${dirPath.red.bold}`, e);
return true;

@@ -128,3 +216,3 @@ }

}
function mkdirWithFail(path) {
function mkdirWithFail(path, options) {
return __awaiter(this, void 0, void 0, function* () {

@@ -136,3 +224,3 @@ try {

catch (e) {
console.error(`${(0, util_1.getTime)()}Failed to create directory ${path.red.bold}`, e);
(0, util_1.log)('error', options, `Failed to create directory ${path.red.bold}`, e);
return true;

@@ -142,3 +230,3 @@ }

}
function writeWithFail(path, content) {
function writeWithFail(path, content, options) {
return __awaiter(this, void 0, void 0, function* () {

@@ -149,3 +237,3 @@ try {

catch (e) {
console.error(`${(0, util_1.getTime)()}Failed to write file to ${path.red.bold}`, e);
(0, util_1.log)('error', options, `Failed to write file to ${path.red.bold}`, e);
}

@@ -152,0 +240,0 @@ });

@@ -6,5 +6,5 @@ /******************************************************************************

******************************************************************************/
import { Grammar } from 'langium';
import { Grammar, LangiumServices } from 'langium';
import { LangiumConfig } from '../package';
export declare function generateAst(grammar: Grammar, config: LangiumConfig): string;
export declare function generateAst(services: LangiumServices, grammars: Grammar[], config: LangiumConfig): string;
//# sourceMappingURL=ast-generator.d.ts.map

@@ -12,7 +12,7 @@ "use strict";

const util_1 = require("./util");
function generateAst(grammar, config) {
const types = (0, type_collector_1.collectAst)(grammar);
function generateAst(services, grammars, config) {
const types = (0, type_collector_1.collectAst)(services.shared.workspace.LangiumDocuments, grammars);
const fileNode = new langium_1.CompositeGeneratorNode();
fileNode.append(util_1.generatedHeader, '/* eslint-disable @typescript-eslint/array-type */', langium_1.NL, '/* eslint-disable @typescript-eslint/no-empty-interface */', langium_1.NL);
const crossRef = hasCrossReferences(grammar);
const crossRef = grammars.some(grammar => hasCrossReferences(grammar));
if (config.langiumInternal) {

@@ -27,6 +27,6 @@ fileNode.append(`import { AstNode, AstReflection${crossRef ? ', Reference' : ''} } from '../../syntax-tree';`, langium_1.NL, "import { isAstNode } from '../../utils/ast-util';", langium_1.NL, langium_1.NL);

}
for (const primitiveRule of (0, langium_1.stream)(grammar.rules).filter(langium_1.isParserRule).filter(e => (0, langium_1.isDataTypeRule)(e))) {
for (const primitiveRule of (0, langium_1.stream)(grammars.flatMap(e => e.rules)).distinct().filter(langium_1.isParserRule).filter(e => (0, langium_1.isDataTypeRule)(e))) {
fileNode.append(buildDatatype(primitiveRule), langium_1.NL, langium_1.NL);
}
fileNode.append(generateAstReflection(grammar, types));
fileNode.append(generateAstReflection(config, types));
return (0, langium_1.processGeneratorNode)(fileNode);

@@ -53,6 +53,6 @@ }

}
function generateAstReflection(grammar, interfaces) {
function generateAstReflection(config, interfaces) {
const reflectionNode = new langium_1.CompositeGeneratorNode();
const crossReferenceTypes = buildCrossReferenceTypes(interfaces);
reflectionNode.append('export type ', grammar.name, 'AstType = ', interfaces.map(t => `'${t.name}'`).join(' | '), ';', langium_1.NL, langium_1.NL, 'export type ', grammar.name, 'AstReference = ', crossReferenceTypes.map(e => `'${e.type}:${e.feature}'`).join(' | ') || 'never', ';', langium_1.NL, langium_1.NL, 'export class ', grammar.name, 'AstReflection implements AstReflection {', langium_1.NL, langium_1.NL);
reflectionNode.append('export type ', config.projectName, 'AstType = ', interfaces.map(t => `'${t.name}'`).join(' | '), ';', langium_1.NL, langium_1.NL, 'export type ', config.projectName, 'AstReference = ', crossReferenceTypes.map(e => `'${e.type}:${e.feature}'`).join(' | ') || 'never', ';', langium_1.NL, langium_1.NL, 'export class ', config.projectName, 'AstReflection implements AstReflection {', langium_1.NL, langium_1.NL);
reflectionNode.indent(classBody => {

@@ -67,5 +67,5 @@ classBody.append('getAllTypes(): string[] {', langium_1.NL);

});
classBody.append('}', langium_1.NL, langium_1.NL, 'isSubtype(subtype: string, supertype: string): boolean {', langium_1.NL, buildIsSubtypeMethod(interfaces), '}', langium_1.NL, langium_1.NL, 'getReferenceType(referenceId: ', grammar.name, 'AstReference): string {', langium_1.NL, buildReferenceTypeMethod(interfaces), '}', langium_1.NL);
classBody.append('}', langium_1.NL, langium_1.NL, 'isSubtype(subtype: string, supertype: string): boolean {', langium_1.NL, buildIsSubtypeMethod(interfaces), '}', langium_1.NL, langium_1.NL, 'getReferenceType(referenceId: ', config.projectName, 'AstReference): string {', langium_1.NL, buildReferenceTypeMethod(interfaces), '}', langium_1.NL);
});
reflectionNode.append('}', langium_1.NL, langium_1.NL, 'export const reflection = new ', grammar.name, 'AstReflection();', langium_1.NL);
reflectionNode.append('}', langium_1.NL, langium_1.NL, 'export const reflection = new ', config.projectName, 'AstReflection();', langium_1.NL);
return reflectionNode;

@@ -112,3 +112,4 @@ }

const groups = groupBySupertypes(interfaces.filter(e => e.superTypes.length > 0));
for (const [superTypes, typeGroup] of groups.entries()) {
for (const superTypes of groups.keys()) {
const typeGroup = groups.get(superTypes);
for (const typeItem of typeGroup) {

@@ -134,12 +135,5 @@ switchNode.append(`case ${typeItem.name}:`, langium_1.NL);

function groupBySupertypes(interfaces) {
const map = new Map();
const map = new langium_1.MultiMap();
for (const item of interfaces) {
const key = item.superTypes.join(':');
const collection = map.get(key);
if (!collection) {
map.set(key, [item]);
}
else {
collection.push(item);
}
map.add(item.superTypes.join(':'), item);
}

@@ -146,0 +140,0 @@ return map;

@@ -8,3 +8,3 @@ /******************************************************************************

import { LangiumConfig } from '../package';
export declare function serializeGrammar(services: LangiumServices, grammar: Grammar, config: LangiumConfig): string;
export declare function serializeGrammar(services: LangiumServices, grammars: Grammar[], config: LangiumConfig): string;
//# sourceMappingURL=grammar-serializer.d.ts.map

@@ -12,6 +12,3 @@ "use strict";

const util_1 = require("./util");
function serializeGrammar(services, grammar, config) {
// The json serializer returns strings with \n line delimiter by default
// We need to translate these line endings to the OS specific line ending
const json = services.serializer.JsonSerializer.serialize(grammar, 2).replace(/\\/g, '\\\\').split('\n').join(os_1.EOL);
function serializeGrammar(services, grammars, config) {
const node = new langium_1.CompositeGeneratorNode();

@@ -26,3 +23,12 @@ node.append(util_1.generatedHeader);

node.append(langium_1.NL, langium_1.NL);
node.append('let loaded: Grammar | undefined;', langium_1.NL, 'export const grammar = (): Grammar => loaded || (loaded = loadGrammar(`', json, '`));', langium_1.NL);
for (let i = 0; i < grammars.length; i++) {
const grammar = grammars[i];
// The json serializer returns strings with \n line delimiter by default
// We need to translate these line endings to the OS specific line ending
const json = services.serializer.JsonSerializer.serialize(grammar, 2).replace(/\\/g, '\\\\').split('\n').join(os_1.EOL);
node.append('let loaded', grammar.name, 'Grammar: Grammar | undefined;', langium_1.NL, 'export const ', grammar.name, 'Grammar = (): Grammar => loaded', grammar.name, 'Grammar ||', '(loaded', grammar.name, 'Grammar = loadGrammar(`', json, '`));', langium_1.NL);
if (i < grammars.length - 1) {
node.append(langium_1.NL);
}
}
return (0, langium_1.processGeneratorNode)(node);

@@ -29,0 +35,0 @@ }

@@ -7,4 +7,4 @@ /******************************************************************************

import * as langium from 'langium';
import { LangiumConfig } from '../package';
export declare function generateModule(grammar: langium.Grammar, config: LangiumConfig): string;
import { LangiumConfig, LangiumLanguageConfig } from '../package';
export declare function generateModule(grammars: langium.Grammar[], config: LangiumConfig, grammarConfigMap: Map<langium.Grammar, LangiumLanguageConfig>): string;
//# sourceMappingURL=module-generator.d.ts.map

@@ -11,3 +11,3 @@ "use strict";

const util_1 = require("./util");
function generateModule(grammar, config) {
function generateModule(grammars, config, grammarConfigMap) {
const parserConfig = config.chevrotainParserConfig;

@@ -19,40 +19,81 @@ const node = new langium_1.CompositeGeneratorNode();

node.append("import { Module } from '../../dependency-injection';", langium_1.NL);
node.contents.push("import { LangiumGeneratedServices, LangiumServices } from '../../services';", langium_1.NL);
node.contents.push("import { LangiumGeneratedServices, LangiumGeneratedSharedServices, LangiumSharedServices, LangiumServices } from '../../services';", langium_1.NL);
}
else {
node.append(`import { LangiumGeneratedServices, LangiumServices, LanguageMetaData, Module${parserConfig ? ', IParserConfig' : ''} } from 'langium';`, langium_1.NL);
node.append(`import { LangiumGeneratedServices, LangiumGeneratedSharedServices, LangiumSharedServices, LangiumServices, LanguageMetaData, Module${parserConfig ? ', IParserConfig' : ''} } from 'langium';`, langium_1.NL);
}
node.append('import { ', grammar.name, "AstReflection } from './ast';", langium_1.NL, "import { grammar } from './grammar';", langium_1.NL, langium_1.NL);
node.append('export const languageMetaData: LanguageMetaData = {', langium_1.NL);
node.indent(metaData => {
metaData.append(`languageId: '${config.languageId}',`, langium_1.NL);
metaData.append(`fileExtensions: [${config.fileExtensions && config.fileExtensions.map(e => appendQuotesAndDot(e)).join(', ')}]`, langium_1.NL);
});
node.append('};', langium_1.NL, langium_1.NL);
if (parserConfig) {
node.append('export const parserConfig: IParserConfig = {', langium_1.NL);
node.indent(configNode => {
Object.keys(parserConfig).forEach(key => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const value = parserConfig[key];
configNode.append(`${key}: ${typeof value === 'string' ? `'${value}'` : value},`, langium_1.NL);
});
node.append('import { ', config.projectName, "AstReflection } from './ast';", langium_1.NL, 'import { ');
for (let i = 0; i < grammars.length; i++) {
const grammar = grammars[i];
node.append(grammar.name, 'Grammar');
if (i < grammars.length - 1) {
node.append(', ');
}
}
node.append(" } from './grammar';", langium_1.NL, langium_1.NL);
for (const grammar of grammars) {
const config = grammarConfigMap.get(grammar);
node.append('export const ', grammar.name, 'LanguageMetaData: LanguageMetaData = {', langium_1.NL);
node.indent(metaData => {
metaData.append(`languageId: '${config.id}',`, langium_1.NL);
metaData.append(`fileExtensions: [${config.fileExtensions && config.fileExtensions.map(e => appendQuotesAndDot(e)).join(', ')}],`, langium_1.NL);
metaData.append(`caseInsensitive: ${!!config.caseInsensitive}`, langium_1.NL);
});
node.append('};', langium_1.NL, langium_1.NL);
}
node.append('export const ', grammar.name, 'GeneratedModule: Module<LangiumServices, LangiumGeneratedServices> = {', langium_1.NL);
let needsGeneralParserConfig = false;
for (const grammar of grammars) {
const grammarConfig = grammarConfigMap.get(grammar);
const grammarParserConfig = grammarConfig.chevrotainParserConfig;
if (grammarParserConfig) {
node.append('export const ', grammar.name, 'ParserConfig: IParserConfig = ', generateParserConfig(grammarParserConfig));
}
else {
needsGeneralParserConfig = true;
}
}
if (needsGeneralParserConfig && parserConfig) {
node.append('export const parserConfig: IParserConfig = ', generateParserConfig(parserConfig));
}
node.append('export const ', config.projectName, 'GeneratedSharedModule: Module<LangiumSharedServices, LangiumGeneratedSharedServices> = {', langium_1.NL);
node.indent(moduleNode => {
moduleNode.append('Grammar: () => grammar(),', langium_1.NL, 'AstReflection: () => new ', grammar.name, 'AstReflection(),', langium_1.NL, 'LanguageMetaData: () => languageMetaData,', langium_1.NL, 'parser: {');
if (parserConfig) {
moduleNode.append(langium_1.NL);
moduleNode.indent(parserGroupNode => {
parserGroupNode.append('ParserConfig: () => parserConfig', langium_1.NL);
});
moduleNode.append('AstReflection: () => new ', config.projectName, 'AstReflection()', langium_1.NL);
});
node.append('};', langium_1.NL, langium_1.NL);
for (let i = 0; i < grammars.length; i++) {
const grammar = grammars[i];
const grammarConfig = grammarConfigMap.get(grammar);
node.append('export const ', grammar.name, 'GeneratedModule: Module<LangiumServices, LangiumGeneratedServices> = {', langium_1.NL);
node.indent(moduleNode => {
moduleNode.append('Grammar: () => ', grammar.name, 'Grammar(),', langium_1.NL, 'LanguageMetaData: () => ', grammar.name, 'LanguageMetaData,', langium_1.NL, 'parser: {');
if (parserConfig) {
moduleNode.append(langium_1.NL);
moduleNode.indent(parserGroupNode => {
const parserConfigName = grammarConfig.chevrotainParserConfig
? grammar.name + 'ParserConfig'
: 'parserConfig';
parserGroupNode.append('ParserConfig: () => ', parserConfigName, langium_1.NL);
});
}
moduleNode.append('}', langium_1.NL);
});
node.append('};', langium_1.NL);
if (i < grammars.length - 1) {
node.append(langium_1.NL);
}
moduleNode.append('}', langium_1.NL);
});
node.append('};', langium_1.NL);
}
return (0, langium_1.processGeneratorNode)(node);
}
exports.generateModule = generateModule;
function generateParserConfig(config) {
const node = new langium_1.CompositeGeneratorNode();
node.append('{', langium_1.NL);
node.indent(configNode => {
for (const [key, value] of Object.entries(config)) {
configNode.append(`${key}: ${typeof value === 'string' ? `'${value}'` : value},`, langium_1.NL);
}
});
node.append('};', langium_1.NL, langium_1.NL);
return node;
}
function appendQuotesAndDot(input) {

@@ -59,0 +100,0 @@ if (!input.startsWith('.')) {
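
Putting the append calls above together, the module generator now emits one shared module per project plus one generated module per grammar. A rough sketch of the resulting module.ts for a hypothetical single-language project named Statemachine (names and formatting are illustrative, not real generator output):

import { LangiumGeneratedServices, LangiumGeneratedSharedServices, LangiumSharedServices, LangiumServices, LanguageMetaData, Module } from 'langium';
import { StatemachineAstReflection } from './ast';
import { StatemachineGrammar } from './grammar';

export const StatemachineLanguageMetaData: LanguageMetaData = {
    languageId: 'statemachine',
    fileExtensions: ['.statemachine'],
    caseInsensitive: false
};

export const StatemachineGeneratedSharedModule: Module<LangiumSharedServices, LangiumGeneratedSharedServices> = {
    AstReflection: () => new StatemachineAstReflection()
};

export const StatemachineGeneratedModule: Module<LangiumServices, LangiumGeneratedServices> = {
    Grammar: () => StatemachineGrammar(),
    LanguageMetaData: () => StatemachineLanguageMetaData,
    parser: {}
};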

@@ -7,3 +7,3 @@ /******************************************************************************

import * as langium from 'langium';
import { LangiumConfig } from '../package';
import { LangiumLanguageConfig } from '../package';
export interface TextMateGrammar {

@@ -44,3 +44,3 @@ repository: Repository;

}
export declare function generateTextMate(grammar: langium.Grammar, config: LangiumConfig): string;
export declare function generateTextMate(grammar: langium.Grammar, config: LangiumLanguageConfig): string;
//# sourceMappingURL=textmate-generator.d.ts.map

@@ -7,4 +7,24 @@ "use strict";

******************************************************************************/
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateTextMate = void 0;
const langium = __importStar(require("langium"));
const langium_1 = require("langium");

@@ -15,4 +35,4 @@ const util_1 = require("./util");

const json = {
name: config.languageId,
scopeName: `source.${config.languageId}`,
name: config.id,
scopeName: `source.${config.id}`,
fileTypes: (_a = config.fileExtensions) !== null && _a !== void 0 ? _a : [],

@@ -30,4 +50,4 @@ patterns: getPatterns(grammar, config),

});
patterns.push(getKeywordControl(grammar, config));
patterns.push(getKeywordSymbols(grammar, config));
patterns.push(getControlKeywords(grammar, config));
patterns.push(...getStringPatterns(grammar, config));
return patterns;

@@ -39,11 +59,11 @@ }

if ((0, langium_1.isTerminalRule)(rule) && (0, langium_1.isCommentTerminal)(rule)) {
const parts = (0, langium_1.getCommentParts)(rule.regex);
const parts = (0, langium_1.getTerminalParts)((0, langium_1.terminalRegex)(rule));
for (const part of parts) {
if (part.end) {
commentPatterns.push({
'name': `comment.block.${config.languageId}`,
'name': `comment.block.${config.id}`,
'begin': part.start,
'beginCaptures': {
'0': {
'name': `punctuation.definition.comment.${config.languageId}`
'name': `punctuation.definition.comment.${config.id}`
}

@@ -54,3 +74,3 @@ },

'0': {
'name': `punctuation.definition.comment.${config.languageId}`
'name': `punctuation.definition.comment.${config.id}`
}

@@ -65,7 +85,7 @@ }

'1': {
'name': `punctuation.whitespace.comment.leading.${config.languageId}`
'name': `punctuation.whitespace.comment.leading.${config.id}`
}
},
'end': '(?=$)',
'name': `comment.line.${config.languageId}`
'name': `comment.line.${config.id}`
});

@@ -83,19 +103,75 @@ }

}
function getKeywordControl(grammar, pack) {
const regex = /[A-Za-z]+/;
const keywords = (0, util_1.collectKeywords)(grammar).filter(kw => regex.test(kw));
function getControlKeywords(grammar, pack) {
const regex = /[A-Za-z]/;
const controlKeywords = (0, util_1.collectKeywords)(grammar).filter(kw => regex.test(kw));
const groups = groupKeywords(controlKeywords, pack.caseInsensitive);
return {
'name': `keyword.control.${pack.languageId}`,
'match': `\\b(${keywords.join('|')})\\b`
'name': `keyword.control.${pack.id}`,
'match': groups.join('|')
};
}
function getKeywordSymbols(grammar, pack) {
const regex = /\W/;
const keywordsFiltered = (0, util_1.collectKeywords)(grammar).filter(kw => regex.test(kw));
const keywords = keywordsFiltered.map(kw => `\\${kw}`);
return {
'name': `keyword.symbol.${pack.languageId}`,
'match': `(${keywords.join('|')})`
};
function groupKeywords(keywords, caseInsensitive) {
const groups = { letter: [], leftSpecial: [], rightSpecial: [], special: [] };
keywords.forEach(keyword => {
const keywordPattern = caseInsensitive ? (0, langium_1.getCaseInsensitivePattern)(keyword) : (0, langium_1.escapeRegExp)(keyword);
if (/\w/.test(keyword[0])) {
if (/\w/.test(keyword[keyword.length - 1])) {
groups.letter.push(keywordPattern);
}
else {
groups.rightSpecial.push(keywordPattern);
}
}
else {
if ((/\w/).test(keyword[keyword.length - 1])) {
groups.leftSpecial.push(keywordPattern);
}
else {
groups.special.push(keywordPattern);
}
}
});
const res = [];
if (groups.letter.length)
res.push(`\\b(${groups.letter.join('|')})\\b`);
if (groups.leftSpecial.length)
res.push(`\\B(${groups.leftSpecial.join('|')})\\b`);
if (groups.rightSpecial.length)
res.push(`\\b(${groups.rightSpecial.join('|')})\\B`);
if (groups.special.length)
res.push(`\\B(${groups.special.join('|')})\\B`);
return res;
}
function getStringPatterns(grammar, pack) {
const terminals = langium.stream(grammar.rules).filter(langium.isTerminalRule);
const stringTerminal = terminals.find(e => e.name.toLowerCase() === 'string');
const stringPatterns = [];
if (stringTerminal) {
const parts = (0, langium_1.getTerminalParts)((0, langium_1.terminalRegex)(stringTerminal));
for (const part of parts) {
if (part.end) {
stringPatterns.push({
'name': `string.quoted.${delimiterName(part.start)}.${pack.id}`,
'begin': part.start,
'end': part.end
});
}
}
}
return stringPatterns;
}
function delimiterName(delimiter) {
if (delimiter === "'") {
return 'single';
}
else if (delimiter === '"') {
return 'double';
}
else if (delimiter === '`') {
return 'backtick';
}
else {
return 'delimiter';
}
}
//# sourceMappingURL=textmate-generator.js.map
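
The reworked getControlKeywords/groupKeywords logic splits keywords into word-boundary groups so that purely symbolic keywords are matched with \B instead of \b. A small standalone check (hypothetical keywords, not taken from any grammar in this diff) of the regex shapes it emits:

// For the keyword set ['if', 'else', '=>'] the grouping yields two alternation groups:
const wordKeywords = /\b(if|else)\b/;   // word characters on both ends
const symbolKeywords = /\B(=>)\B/;      // non-word characters on both ends
console.log(wordKeywords.test('else {'));    // true
console.log(symbolKeywords.test('a => b'));  // true
console.log(symbolKeywords.test('x=>y'));    // false: '=' right after a word character sits on a word boundary, so \B rejects it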

@@ -7,2 +7,3 @@ /******************************************************************************

import * as langium from 'langium';
import { LangiumDocuments } from 'langium';
declare type Field = {

@@ -24,4 +25,4 @@ name: string;

}
export declare function collectAst(grammar: langium.Grammar): Interface[];
export declare function collectAst(documents: LangiumDocuments, grammars: langium.Grammar[]): Interface[];
export {};
//# sourceMappingURL=type-collector.d.ts.map

@@ -118,5 +118,5 @@ "use strict";

}
function collectAst(grammar) {
function collectAst(documents, grammars) {
const state = createState();
const parserRules = grammar.rules.filter(e => langium.isParserRule(e) && !e.fragment && !(0, langium_1.isDataTypeRule)(e)).map(e => e);
const parserRules = collectAllParserRules(documents, grammars);
const allTypes = [];

@@ -133,2 +133,19 @@ for (const rule of parserRules) {

exports.collectAst = collectAst;
function collectAllParserRules(documents, grammars, rules = new Set(), visited = new Set()) {
for (const grammar of grammars) {
const doc = (0, langium_1.getDocument)(grammar);
if (visited.has(doc.uri)) {
continue;
}
visited.add(doc.uri);
for (const rule of grammar.rules) {
if (langium.isParserRule(rule) && !rule.fragment && !(0, langium_1.isDataTypeRule)(rule)) {
rules.add(rule);
}
}
const importedGrammars = grammar.imports.map(e => (0, langium_1.resolveImport)(documents, e));
collectAllParserRules(documents, importedGrammars, rules, visited);
}
return Array.from(rules);
}
function createState(type) {

@@ -135,0 +152,0 @@ const state = { types: [], cardinalities: [], tree: new TypeTree() };

@@ -7,2 +7,4 @@ /******************************************************************************

import * as langium from 'langium';
import type { GenerateOptions } from '../generate';
export declare function log(level: 'log' | 'warn' | 'error', options: GenerateOptions, message: string, ...args: any[]): void;
export declare function elapsedTime(): string;

@@ -15,3 +17,3 @@ export declare function getTime(): string;

export declare const generatedHeader: langium.GeneratorNode;
export declare const schema: any;
export declare const schema: Promise<any>;
//# sourceMappingURL=util.d.ts.map

@@ -39,3 +39,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.schema = exports.generatedHeader = exports.cliVersion = exports.getUserChoice = exports.getUserInput = exports.collectKeywords = exports.getTime = exports.elapsedTime = void 0;
exports.schema = exports.generatedHeader = exports.cliVersion = exports.getUserChoice = exports.getUserInput = exports.collectKeywords = exports.getTime = exports.elapsedTime = exports.log = void 0;
const langium = __importStar(require("langium"));

@@ -46,2 +46,12 @@ const langium_1 = require("langium");

const readline = __importStar(require("readline"));
//eslint-disable-next-line @typescript-eslint/no-explicit-any
function log(level, options, message, ...args) {
if (options.watch) {
console[level](getTime() + message, ...args);
}
else {
console[level](message, ...args);
}
}
exports.log = log;
let start = process.hrtime();

@@ -122,3 +132,3 @@ function elapsedTime() {

exports.generatedHeader = getGeneratedHeader();
exports.schema = fs_extra_1.default.readJsonSync(path_1.default.resolve(__dirname, '../../langium-config-schema.json'), { encoding: 'utf-8' });
exports.schema = fs_extra_1.default.readJson(path_1.default.resolve(__dirname, '../../langium-config-schema.json'), { encoding: 'utf-8' });
//# sourceMappingURL=util.js.map

@@ -41,7 +41,3 @@ "use strict";

return __awaiter(this, void 0, void 0, function* () {
const configs = (0, package_1.loadConfigs)(options.file);
if (!configs.length) {
console.error('Could not find a langium configuration. Please add a langium-config.json to your project or a langium section to your package.json.'.red);
process.exit(1);
}
const configs = yield (0, package_1.loadConfigs)(options);
const validation = (0, jsonschema_1.validate)(configs, util_1.schema, {

@@ -51,10 +47,11 @@ nestedErrors: true

if (!validation.valid) {
console.error('Error: Your Langium configuration is invalid.'.red);
(0, util_1.log)('error', options, 'Error: Your Langium configuration is invalid.'.red);
const errors = validation.errors.filter(error => error.path.length > 0);
errors.forEach(error => {
console.error(`--> ${error.stack}`);
(0, util_1.log)('error', options, `--> ${error.stack}`);
});
process.exit(1);
}
const allSuccessful = (yield Promise.all(configs.map(callback))).every(e => e === 'success');
const results = yield Promise.all(configs.map(config => callback(config, options)));
const allSuccessful = results.every(result => result === 'success');
if (options.watch) {

@@ -65,12 +62,14 @@ if (allSuccessful) {

console.log((0, util_1.getTime)() + 'Langium generator will continue running in watch mode');
configs.forEach(e => {
const grammarPath = path_1.default.resolve(e[package_1.RelativePath], e.grammar);
fs_extra_1.default.watchFile(grammarPath, () => __awaiter(this, void 0, void 0, function* () {
console.log((0, util_1.getTime)() + 'File change detected. Starting compilation...');
(0, util_1.elapsedTime)();
if ((yield callback(e)) === 'success') {
console.log(`${(0, util_1.getTime)()}Langium generator finished ${'successfully'.green.bold} in ${(0, util_1.elapsedTime)()}ms`);
}
}));
});
for (const config of configs) {
for (const language of config.languages) {
const grammarPath = path_1.default.resolve(config[package_1.RelativePath], language.grammar);
fs_extra_1.default.watchFile(grammarPath, () => __awaiter(this, void 0, void 0, function* () {
console.log((0, util_1.getTime)() + 'File change detected. Starting compilation...');
(0, util_1.elapsedTime)();
if ((yield callback(config, options)) === 'success') {
console.log(`${(0, util_1.getTime)()}Langium generator finished ${'successfully'.green.bold} in ${(0, util_1.elapsedTime)()}ms`);
}
}));
}
}
}

@@ -81,3 +80,3 @@ else if (!allSuccessful) {

else {
console.log(`${(0, util_1.getTime)()}Langium generator finished ${'successfully'.green.bold} in ${(0, util_1.elapsedTime)()}ms`);
console.log(`Langium generator finished ${'successfully'.green.bold} in ${(0, util_1.elapsedTime)()}ms`);
}

@@ -84,0 +83,0 @@ });

@@ -6,3 +6,4 @@ /******************************************************************************

******************************************************************************/
import { IParserConfig } from 'langium';
import type { IParserConfig } from 'langium';
import type { GenerateOptions } from './generate';
export interface Package {

@@ -17,4 +18,13 @@ name: string;

[RelativePath]: string;
projectName: string;
languages: LangiumLanguageConfig[];
/** Main output directory for TypeScript code */
out?: string;
chevrotainParserConfig?: IParserConfig;
/** The following option is meant to be used only by Langium itself */
langiumInternal?: boolean;
}
export interface LangiumLanguageConfig {
/** The identifier of your language as used in vscode */
languageId: string;
id: string;
/** Path to the grammar file */

@@ -24,4 +34,4 @@ grammar: string;

fileExtensions?: string[];
/** Main output directory for TypeScript code */
out?: string;
/** Enable case-insensitive keywords parsing */
caseInsensitive?: boolean;
/** Enable generating a TextMate syntax highlighting file */

@@ -34,6 +44,4 @@ textMate?: {

chevrotainParserConfig?: IParserConfig;
/** The following option is meant to be used only by Langium itself */
langiumInternal?: boolean;
}
export declare function loadConfigs(file: string | undefined): LangiumConfig[];
export declare function loadConfigs(options: GenerateOptions): Promise<LangiumConfig[]>;
//# sourceMappingURL=package.d.ts.map

@@ -7,2 +7,11 @@ "use strict";

******************************************************************************/
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -17,33 +26,38 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.RelativePath = Symbol('RelativePath');
function loadConfigs(file) {
let defaultPath = './langium-config.json';
if (!fs_extra_1.default.existsSync(defaultPath)) {
defaultPath = './package.json';
}
const filePath = path_1.default.normalize(file !== null && file !== void 0 ? file : defaultPath);
const relativePath = path_1.default.dirname(filePath);
console.log(`${(0, util_1.getTime)()}Reading config from ${filePath.white.bold}`);
let obj;
try {
obj = fs_extra_1.default.readJsonSync(filePath, { encoding: 'utf-8' });
}
catch (e) {
console.error((0, util_1.getTime)() + 'Failed to read config file.', e);
process.exit(1);
}
if (Array.isArray(obj)) { // We have an array of configs in our 'langium-config.json'
return obj.map(e => {
e[exports.RelativePath] = relativePath;
return e;
});
}
else if (!('name' in obj)) { // We have a single config in our 'langium-config.json'
obj[exports.RelativePath] = relativePath;
return [obj];
}
else { // Invalid data
return [];
}
function loadConfigs(options) {
return __awaiter(this, void 0, void 0, function* () {
let filePath;
if (options.file) {
filePath = path_1.default.normalize(options.file);
}
else {
let defaultFile = 'langium-config.json';
if (!fs_extra_1.default.existsSync(defaultFile)) {
defaultFile = 'package.json';
}
filePath = path_1.default.normalize(defaultFile);
}
const relativePath = path_1.default.dirname(filePath);
(0, util_1.log)('log', options, `Reading config from ${filePath.white.bold}`);
try {
const obj = yield fs_extra_1.default.readJson(filePath, { encoding: 'utf-8' });
const config = path_1.default.basename(filePath) === 'package.json' ? obj.langium : obj;
if (Array.isArray(config)) {
config.forEach(c => {
c[exports.RelativePath] = relativePath;
});
return config;
}
else {
config[exports.RelativePath] = relativePath;
}
return [config];
}
catch (err) {
(0, util_1.log)('error', options, 'Failed to read config file.'.red, err);
process.exit(1);
}
});
}
exports.loadConfigs = loadConfigs;
//# sourceMappingURL=package.js.map
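
As the reworked loadConfigs shows, the configuration may live either in langium-config.json or under a langium key in package.json, and it may be a single object or an array. A hedged example of the package.json variant (all values are illustrative):

{
  "name": "statemachine-dsl",
  "version": "0.1.0",
  "langium": {
    "projectName": "Statemachine",
    "languages": [
      {
        "id": "statemachine",
        "grammar": "src/statemachine.langium",
        "fileExtensions": [".statemachine"]
      }
    ],
    "out": "src/generated"
  }
}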

@@ -14,5 +14,7 @@ "use strict";

const unavailable = () => ({});
const generatedSharedModule = {
AstReflection: unavailable,
};
const generatedModule = {
Grammar: () => grammar,
AstReflection: unavailable,
LanguageMetaData: unavailable,

@@ -23,3 +25,4 @@ parser: {

};
const services = (0, langium_1.inject)((0, langium_1.createDefaultModule)({}), generatedModule);
const shared = (0, langium_1.inject)((0, langium_1.createDefaultSharedModule)(), generatedSharedModule);
const services = (0, langium_1.inject)((0, langium_1.createDefaultModule)({ shared }), generatedModule);
try {

@@ -26,0 +29,0 @@ (0, langium_1.createLangiumParser)(services);

{
"name": "langium-cli",
"version": "0.2.0",
"version": "0.3.0-next.5e9d27d",
"description": "CLI for Langium - the language engineering tool",

@@ -40,3 +40,3 @@ "engines": {

"jsonschema": "^1.4.0",
"langium": "~0.2.0",
"langium": "0.3.0-next.5e9d27d",
"lodash": "^4.17.21"

@@ -43,0 +43,0 @@ },

@@ -8,11 +8,14 @@ /******************************************************************************

import fs from 'fs-extra';
import {
AbstractRule, BuildResult, createLangiumGrammarServices, getDocument, Grammar, isGrammar,
isParserRule, LangiumDocument, resolveImport, resolveTransitiveImports
} from 'langium';
import path from 'path';
import { LangiumConfig, RelativePath } from './package';
import { createLangiumGrammarServices, isGrammar } from 'langium';
import { URI } from 'vscode-uri';
import { URI, Utils } from 'vscode-uri';
import { generateAst } from './generator/ast-generator';
import { serializeGrammar } from './generator/grammar-serializer';
import { generateModule } from './generator/module-generator';
import { generateTextMate } from './generator/textmate-generator';
import { serializeGrammar } from './generator/grammar-serializer';
import { getTime, getUserChoice } from './generator/util';
import { getUserChoice, log } from './generator/util';
import { LangiumConfig, LangiumLanguageConfig, RelativePath } from './package';
import { validateParser } from './parser-validation';

@@ -27,82 +30,178 @@

const services = createLangiumGrammarServices();
const { shared: sharedServices, grammar: grammarServices } = createLangiumGrammarServices();
const documents = sharedServices.workspace.LangiumDocuments;
export async function generate(config: LangiumConfig): Promise<GeneratorResult> {
// Load, parse and validate the grammar
function eagerLoad(document: LangiumDocument, uris: Set<string> = new Set()): URI[] {
const uriString = document.uri.toString();
if (!uris.has(uriString)) {
uris.add(uriString);
const grammar = document.parseResult.value;
if (isGrammar(grammar)) {
for (const imp of grammar.imports) {
const importedGrammar = resolveImport(documents, imp);
if (importedGrammar) {
const importedDoc = getDocument(importedGrammar);
eagerLoad(importedDoc, uris);
}
}
}
}
return Array.from(uris).map(e => URI.parse(e));
}
/**
* Creates a map that contains all rules of all grammars.
* This includes both input grammars and their transitive dependencies.
*/
function mapRules(grammars: Grammar[], visited: Set<string> = new Set(), map: Map<Grammar, AbstractRule[]> = new Map()): Map<Grammar, AbstractRule[]> {
for (const grammar of grammars) {
const doc = getDocument(grammar);
const uriString = doc.uri.toString();
if (!visited.has(uriString)) {
visited.add(uriString);
map.set(grammar, grammar.rules.map(e => {
// Create a new array of rules and copy all rules
// Also deactivate all entry rules
const shallowCopy = {...e};
if (isParserRule(shallowCopy)) {
shallowCopy.entry = false;
}
return shallowCopy;
}));
const importedGrammars = grammar.imports.map(e => resolveImport(documents, e)!);
mapRules(importedGrammars, visited, map);
}
}
return map;
}
function embedReferencedRules(grammar: Grammar, map: Map<Grammar, AbstractRule[]>): void {
const allGrammars = resolveTransitiveImports(documents, grammar);
for (const importedGrammar of allGrammars) {
const rules = map.get(importedGrammar);
if (rules) {
grammar.rules.push(...rules);
}
}
}
async function buildAll(config: LangiumConfig): Promise<Map<string, BuildResult>> {
for (const doc of documents.all) {
documents.invalidateDocument(doc.uri);
}
const map = new Map<string, BuildResult>();
const relPath = config[RelativePath];
const absGrammarPath = URI.file(path.resolve(relPath, config.grammar));
services.documents.LangiumDocuments.invalidateDocument(absGrammarPath);
const document = services.documents.LangiumDocuments.getOrCreateDocument(absGrammarPath);
const buildResult = await services.documents.DocumentBuilder.build(document);
const diagnostics = buildResult.diagnostics;
if (!isGrammar(document.parseResult.value)) {
console.error(getTime() + 'Failed to parse the grammar file: ' + config.grammar);
for (const languageConfig of config.languages) {
const absGrammarPath = URI.file(path.resolve(relPath, languageConfig.grammar));
const document = documents.getOrCreateDocument(absGrammarPath);
const allUris = eagerLoad(document);
await sharedServices.workspace.DocumentBuilder.update(allUris, []);
}
for (const doc of documents.all) {
const buildResult = await sharedServices.workspace.DocumentBuilder.build(doc);
map.set(doc.uri.fsPath, buildResult);
}
return map;
}
export async function generate(config: LangiumConfig, options: GenerateOptions): Promise<GeneratorResult> {
if (!config.languages || config.languages.length === 0) {
log('error', options, 'No languages specified in config.');
return 'failure';
} else if (diagnostics?.length && diagnostics.some(e => e.severity === 1)) {
console.error(getTime() + 'Grammar contains validation errors:');
diagnostics.forEach(e => {
const message = `${e.range.start.line}:${e.range.start.character} - ${e.message}`;
if (e.severity === 1) {
console.error(message.red);
} else if (e.severity === 2) {
console.warn(message.yellow);
}
const all = await buildAll(config);
let hasErrors = false;
for (const [path, buildResult] of all) {
const diagnostics = buildResult.diagnostics;
for (const diagnostic of diagnostics) {
const message = `${Utils.basename(URI.file(path))}:${diagnostic.range.start.line + 1}:${diagnostic.range.start.character + 1} - ${diagnostic.message}`;
if (diagnostic.severity === 1) {
log('error', options, message.red);
} else if (diagnostic.severity === 2) {
log('warn', options, message.yellow);
} else {
console.log(message);
log('log', options, message);
}
});
console.error(`${getTime()}Langium generator ${'failed'.red.bold}.`);
return 'failure';
}
if (!hasErrors) {
hasErrors = diagnostics.length > 0 && diagnostics.some(e => e.severity === 1);
}
}
const grammar = document.parseResult.value;
// Create and validate the in-memory parser
const parserAnalysis = validateParser(grammar, config);
if (parserAnalysis instanceof Error) {
console.error(parserAnalysis.toString().red);
if (hasErrors) {
log('error', options, `Langium generator ${'failed'.red.bold}.`);
return 'failure';
}
const grammars: Grammar[] = [];
const configMap: Map<Grammar, LangiumLanguageConfig> = new Map();
const relPath = config[RelativePath];
for (const languageConfig of config.languages) {
const absGrammarPath = URI.file(path.resolve(relPath, languageConfig.grammar)).fsPath;
const buildResult = all.get(absGrammarPath);
if (buildResult) {
const grammar = buildResult.document.parseResult.value as Grammar;
grammars.push(grammar);
configMap.set(grammar, languageConfig);
}
}
const ruleMap = mapRules(grammars);
for (const grammar of grammars) {
embedReferencedRules(grammar, ruleMap);
// Create and validate the in-memory parser
const parserAnalysis = validateParser(grammar, config);
if (parserAnalysis instanceof Error) {
log('error', options, parserAnalysis.toString().red);
return 'failure';
}
}
// Generate the output files
const output = path.resolve(relPath, config.out ?? 'src/generated');
console.log(`${getTime()}Writing generated files to ${output.white.bold}`);
log('log', options, `Writing generated files to ${output.white.bold}`);
if (await rmdirWithFail(output, ['ast.ts', 'grammar.ts', 'grammar-access.ts', 'parser.ts', 'module.ts'])) {
if (await rmdirWithFail(output, ['ast.ts', 'grammar.ts', 'module.ts'], options)) {
return 'failure';
}
if (await mkdirWithFail(output)) {
if (await mkdirWithFail(output, options)) {
return 'failure';
}
const genAst = generateAst(grammar, config);
await writeWithFail(path.resolve(output, 'ast.ts'), genAst);
const genAst = generateAst(grammarServices, grammars, config);
await writeWithFail(path.resolve(output, 'ast.ts'), genAst, options);
const serializedGrammar = serializeGrammar(services, grammar, config);
await writeWithFail(path.resolve(output, 'grammar.ts'), serializedGrammar);
const serializedGrammar = serializeGrammar(grammarServices, grammars, config);
await writeWithFail(path.resolve(output, 'grammar.ts'), serializedGrammar, options);
const genModule = generateModule(grammar, config);
await writeWithFail(path.resolve(output, 'module.ts'), genModule);
const genModule = generateModule(grammars, config, configMap);
await writeWithFail(path.resolve(output, 'module.ts'), genModule, options);
if (config.textMate) {
const genTmGrammar = generateTextMate(grammar, config);
const textMatePath = path.resolve(relPath, config.textMate.out);
console.log(`${getTime()}Writing textmate grammar to ${textMatePath.white.bold}`);
const parentDir = path.dirname(textMatePath).split(path.sep).pop();
parentDir && await mkdirWithFail(parentDir);
await writeWithFail(textMatePath, genTmGrammar);
for (const grammar of grammars) {
const languageConfig = configMap.get(grammar);
if (languageConfig?.textMate) {
const genTmGrammar = generateTextMate(grammar, languageConfig);
const textMatePath = path.resolve(relPath, languageConfig.textMate.out);
log('log', options, `Writing textmate grammar to ${textMatePath.white.bold}`);
const parentDir = path.dirname(textMatePath).split(path.sep).pop();
parentDir && await mkdirWithFail(parentDir, options);
await writeWithFail(textMatePath, genTmGrammar, options);
}
}
return 'success';
}
async function rmdirWithFail(dirPath: string, expectedFiles?: string[]): Promise<boolean> {
async function rmdirWithFail(dirPath: string, expectedFiles: string[], options: GenerateOptions): Promise<boolean> {
try {
let deleteDir = true;
const dirExists = await fs.pathExists(dirPath);
if(dirExists) {
if (expectedFiles) {
const existingFiles = await fs.readdir(dirPath);
const unexpectedFiles = existingFiles.filter(file => !expectedFiles.includes(path.basename(file)));
if (unexpectedFiles.length > 0) {
console.log(`${getTime()}Found unexpected files in the generated directory: ${unexpectedFiles.map(e => e.yellow).join(', ')}`);
deleteDir = await getUserChoice(`${getTime()}Do you want to delete the files?`, ['yes', 'no'], 'yes') === 'yes';
}
if (dirExists) {
const existingFiles = await fs.readdir(dirPath);
const unexpectedFiles = existingFiles.filter(file => !expectedFiles.includes(path.basename(file)));
if (unexpectedFiles.length > 0) {
log('log', options, `Found unexpected files in the generated directory: ${unexpectedFiles.map(e => e.yellow).join(', ')}`);
deleteDir = await getUserChoice('Do you want to delete the files?', ['yes', 'no'], 'yes') === 'yes';
}

@@ -115,3 +214,3 @@ if (deleteDir) {

} catch (e) {
console.error(`${getTime()}Failed to delete directory ${dirPath.red.bold}`, e);
log('error', options, `Failed to delete directory ${dirPath.red.bold}`, e);
return true;

@@ -121,3 +220,3 @@ }

async function mkdirWithFail(path: string): Promise<boolean> {
async function mkdirWithFail(path: string, options: GenerateOptions): Promise<boolean> {
try {

@@ -127,3 +226,3 @@ await fs.mkdirs(path);

} catch (e) {
console.error(`${getTime()}Failed to create directory ${path.red.bold}`, e);
log('error', options, `Failed to create directory ${path.red.bold}`, e);
return true;

@@ -133,8 +232,8 @@ }

async function writeWithFail(path: string, content: string): Promise<void> {
async function writeWithFail(path: string, content: string, options: GenerateOptions): Promise<void> {
try {
await fs.writeFile(path, content);
} catch (e) {
console.error(`${getTime()}Failed to write file to ${path.red.bold}`, e);
log('error', options, `Failed to write file to ${path.red.bold}`, e);
}
}

@@ -7,3 +7,7 @@ /******************************************************************************

import { GeneratorNode, Grammar, IndentNode, CompositeGeneratorNode, NL, processGeneratorNode, stream, isAlternatives, isKeyword, isParserRule, isDataTypeRule, ParserRule, streamAllContents, isCrossReference } from 'langium';
import {
GeneratorNode, Grammar, IndentNode, CompositeGeneratorNode, NL, processGeneratorNode, stream,
isAlternatives, isKeyword, isParserRule, isDataTypeRule, ParserRule, streamAllContents,
isCrossReference, MultiMap, LangiumServices
} from 'langium';
import { LangiumConfig } from '../package';

@@ -13,4 +17,4 @@ import { collectAst, Interface } from './type-collector';

export function generateAst(grammar: Grammar, config: LangiumConfig): string {
const types = collectAst(grammar);
export function generateAst(services: LangiumServices, grammars: Grammar[], config: LangiumConfig): string {
const types = collectAst(services.shared.workspace.LangiumDocuments, grammars);
const fileNode = new CompositeGeneratorNode();

@@ -22,3 +26,3 @@ fileNode.append(

);
const crossRef = hasCrossReferences(grammar);
const crossRef = grammars.some(grammar => hasCrossReferences(grammar));
if (config.langiumInternal) {

@@ -36,7 +40,7 @@ fileNode.append(

}
for (const primitiveRule of stream(grammar.rules).filter(isParserRule).filter(e => isDataTypeRule(e))) {
for (const primitiveRule of stream(grammars.flatMap(e => e.rules)).distinct().filter(isParserRule).filter(e => isDataTypeRule(e))) {
fileNode.append(buildDatatype(primitiveRule), NL, NL);
}
fileNode.append(generateAstReflection(grammar, types));
fileNode.append(generateAstReflection(config, types));

@@ -70,3 +74,3 @@ return processGeneratorNode(fileNode);

function generateAstReflection(grammar: Grammar, interfaces: Interface[]): GeneratorNode {
function generateAstReflection(config: LangiumConfig, interfaces: Interface[]): GeneratorNode {
const reflectionNode = new CompositeGeneratorNode();

@@ -76,9 +80,9 @@ const crossReferenceTypes = buildCrossReferenceTypes(interfaces);

reflectionNode.append(
'export type ', grammar.name, 'AstType = ',
'export type ', config.projectName, 'AstType = ',
interfaces.map(t => `'${t.name}'`).join(' | '),
';', NL, NL,
'export type ', grammar.name, 'AstReference = ',
'export type ', config.projectName, 'AstReference = ',
crossReferenceTypes.map(e => `'${e.type}:${e.feature}'`).join(' | ') || 'never',
';', NL, NL,
'export class ', grammar.name, 'AstReflection implements AstReflection {', NL, NL
'export class ', config.projectName, 'AstReflection implements AstReflection {', NL, NL
);

@@ -99,3 +103,3 @@

buildIsSubtypeMethod(interfaces), '}', NL, NL,
'getReferenceType(referenceId: ', grammar.name, 'AstReference): string {', NL,
'getReferenceType(referenceId: ', config.projectName, 'AstReference): string {', NL,
buildReferenceTypeMethod(interfaces), '}', NL,

@@ -107,3 +111,3 @@ );

'}', NL, NL,
'export const reflection = new ', grammar.name, 'AstReflection();', NL
'export const reflection = new ', config.projectName, 'AstReflection();', NL
);

@@ -159,3 +163,4 @@

for (const [superTypes, typeGroup] of groups.entries()) {
for (const superTypes of groups.keys()) {
const typeGroup = groups.get(superTypes);
for (const typeItem of typeGroup) {

@@ -182,14 +187,8 @@ switchNode.append(`case ${typeItem.name}:`, NL);

function groupBySupertypes(interfaces: Interface[]): Map<string, Interface[]> {
const map = new Map<string, Interface[]>();
function groupBySupertypes(interfaces: Interface[]): MultiMap<string, Interface> {
const map = new MultiMap<string, Interface>();
for (const item of interfaces) {
const key = item.superTypes.join(':');
const collection = map.get(key);
if (!collection) {
map.set(key, [item]);
} else {
collection.push(item);
}
map.add(item.superTypes.join(':'), item);
}
return map;
}
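The groupBySupertypes helper now leans on langium's MultiMap instead of a hand-rolled Map of arrays. A minimal sketch of the behaviour it relies on, assuming MultiMap exposes add and get roughly as below (the keys and values are placeholders, not taken from this diff):

    import { MultiMap } from 'langium';

    const bySuperTypes = new MultiMap<string, string>();
    bySuperTypes.add('NamedElement', 'State');
    bySuperTypes.add('NamedElement', 'Event');
    // get returns every value registered under the key, so the old
    // "create the array if it is missing" bookkeeping is no longer needed.
    const group = bySuperTypes.get('NamedElement'); // ['State', 'Event']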

@@ -12,6 +12,3 @@ /******************************************************************************

export function serializeGrammar(services: LangiumServices, grammar: Grammar, config: LangiumConfig): string {
// The json serializer returns strings with \n line delimiter by default
// We need to translate these line endings to the OS specific line ending
const json = services.serializer.JsonSerializer.serialize(grammar, 2).replace(/\\/g, '\\\\').split('\n').join(EOL);
export function serializeGrammar(services: LangiumServices, grammars: Grammar[], config: LangiumConfig): string {
const node = new CompositeGeneratorNode();

@@ -23,4 +20,3 @@ node.append(generatedHeader);

"import { loadGrammar } from '../grammar-util';", NL,
"import { Grammar } from './ast';"
);
"import { Grammar } from './ast';");
} else {

@@ -31,7 +27,16 @@ node.append("import { loadGrammar, Grammar } from 'langium';");

node.append(
'let loaded: Grammar | undefined;', NL,
'export const grammar = (): Grammar => loaded || (loaded = loadGrammar(`', json, '`));', NL
);
for (let i = 0; i < grammars.length; i++) {
const grammar = grammars[i];
// The json serializer returns strings with \n line delimiter by default
// We need to translate these line endings to the OS specific line ending
const json = services.serializer.JsonSerializer.serialize(grammar, 2).replace(/\\/g, '\\\\').split('\n').join(EOL);
node.append(
'let loaded', grammar.name, 'Grammar: Grammar | undefined;', NL,
'export const ', grammar.name, 'Grammar = (): Grammar => loaded', grammar.name, 'Grammar ||', '(loaded', grammar.name, 'Grammar = loadGrammar(`', json, '`));', NL
);
if (i < grammars.length - 1) {
node.append(NL);
}
}
return processGeneratorNode(node);
}
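With serializeGrammar now taking all grammars of a project, the generated grammar.ts contains one lazily loaded export per grammar instead of a single grammar constant. A rough sketch of the output for two grammars (the grammar names and the serialized JSON payload are placeholders):

    import { loadGrammar, Grammar } from 'langium';

    let loadedStatesGrammar: Grammar | undefined;
    export const StatesGrammar = (): Grammar => loadedStatesGrammar || (loadedStatesGrammar = loadGrammar(`{ ... }`));

    let loadedEventsGrammar: Grammar | undefined;
    export const EventsGrammar = (): Grammar => loadedEventsGrammar || (loadedEventsGrammar = loadGrammar(`{ ... }`));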

@@ -9,6 +9,6 @@ /******************************************************************************

import { CompositeGeneratorNode, NL, processGeneratorNode } from 'langium';
import { LangiumConfig } from '../package';
import { LangiumConfig, LangiumLanguageConfig } from '../package';
import { generatedHeader } from './util';
export function generateModule(grammar: langium.Grammar, config: LangiumConfig): string {
export function generateModule(grammars: langium.Grammar[], config: LangiumConfig, grammarConfigMap: Map<langium.Grammar, LangiumLanguageConfig>): string {
const parserConfig = config.chevrotainParserConfig;

@@ -21,26 +21,27 @@ const node = new CompositeGeneratorNode();

node.append("import { Module } from '../../dependency-injection';", NL);
node.contents.push("import { LangiumGeneratedServices, LangiumServices } from '../../services';", NL);
node.contents.push("import { LangiumGeneratedServices, LangiumGeneratedSharedServices, LangiumSharedServices, LangiumServices } from '../../services';", NL);
} else {
node.append(`import { LangiumGeneratedServices, LangiumServices, LanguageMetaData, Module${parserConfig ? ', IParserConfig' : ''} } from 'langium';`, NL);
node.append(`import { LangiumGeneratedServices, LangiumGeneratedSharedServices, LangiumSharedServices, LangiumServices, LanguageMetaData, Module${parserConfig ? ', IParserConfig' : ''} } from 'langium';`, NL);
}
node.append(
'import { ', grammar.name, "AstReflection } from './ast';", NL,
"import { grammar } from './grammar';", NL, NL
'import { ', config.projectName, "AstReflection } from './ast';", NL,
'import { '
);
for (let i = 0; i < grammars.length; i++) {
const grammar = grammars[i];
node.append(grammar.name, 'Grammar');
if (i < grammars.length - 1) {
node.append(', ');
}
}
node.append(" } from './grammar';", NL, NL);
node.append('export const languageMetaData: LanguageMetaData = {', NL);
node.indent(metaData => {
metaData.append(`languageId: '${config.languageId}',`, NL);
metaData.append(`fileExtensions: [${config.fileExtensions && config.fileExtensions.map(e => appendQuotesAndDot(e)).join(', ')}]`, NL);
});
node.append('};', NL, NL);
if (parserConfig) {
node.append('export const parserConfig: IParserConfig = {', NL);
node.indent(configNode => {
Object.keys(parserConfig).forEach(key => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const value = (parserConfig as any)[key];
configNode.append(`${key}: ${typeof value === 'string' ? `'${value}'` : value},`, NL);
});
for (const grammar of grammars) {
const config = grammarConfigMap.get(grammar)!;
node.append('export const ', grammar.name, 'LanguageMetaData: LanguageMetaData = {', NL);
node.indent(metaData => {
metaData.append(`languageId: '${config.id}',`, NL);
metaData.append(`fileExtensions: [${config.fileExtensions && config.fileExtensions.map(e => appendQuotesAndDot(e)).join(', ')}],`, NL);
metaData.append(`caseInsensitive: ${!!config.caseInsensitive}`, NL);
});

@@ -50,23 +51,67 @@ node.append('};', NL, NL);

node.append('export const ', grammar.name, 'GeneratedModule: Module<LangiumServices, LangiumGeneratedServices> = {', NL);
let needsGeneralParserConfig = false;
for (const grammar of grammars) {
const grammarConfig = grammarConfigMap.get(grammar)!;
const grammarParserConfig = grammarConfig.chevrotainParserConfig;
if (grammarParserConfig) {
node.append('export const ', grammar.name, 'ParserConfig: IParserConfig = ', generateParserConfig(grammarParserConfig));
} else {
needsGeneralParserConfig = true;
}
}
if (needsGeneralParserConfig && parserConfig) {
node.append('export const parserConfig: IParserConfig = ', generateParserConfig(parserConfig));
}
node.append('export const ', config.projectName, 'GeneratedSharedModule: Module<LangiumSharedServices, LangiumGeneratedSharedServices> = {', NL);
node.indent(moduleNode => {
moduleNode.append(
'Grammar: () => grammar(),', NL,
'AstReflection: () => new ', grammar.name, 'AstReflection(),', NL,
'LanguageMetaData: () => languageMetaData,', NL,
'parser: {',
'AstReflection: () => new ', config.projectName, 'AstReflection()', NL
);
if (parserConfig) {
moduleNode.append(NL);
moduleNode.indent(parserGroupNode => {
parserGroupNode.append('ParserConfig: () => parserConfig', NL);
});
}
moduleNode.append('}', NL);
});
node.append('};', NL);
node.append('};', NL, NL);
for (let i = 0; i < grammars.length; i++) {
const grammar = grammars[i];
const grammarConfig = grammarConfigMap.get(grammar)!;
node.append('export const ', grammar.name, 'GeneratedModule: Module<LangiumServices, LangiumGeneratedServices> = {', NL);
node.indent(moduleNode => {
moduleNode.append(
'Grammar: () => ', grammar.name, 'Grammar(),', NL,
'LanguageMetaData: () => ', grammar.name, 'LanguageMetaData,', NL,
'parser: {'
);
if (parserConfig) {
moduleNode.append(NL);
moduleNode.indent(parserGroupNode => {
const parserConfigName = grammarConfig.chevrotainParserConfig
? grammar.name + 'ParserConfig'
: 'parserConfig';
parserGroupNode.append('ParserConfig: () => ', parserConfigName, NL);
});
}
moduleNode.append('}', NL);
});
node.append('};', NL);
if (i < grammars.length - 1) {
node.append(NL);
}
}
return processGeneratorNode(node);
}
function generateParserConfig(config: langium.IParserConfig): CompositeGeneratorNode {
const node = new CompositeGeneratorNode();
node.append('{', NL);
node.indent(configNode => {
for (const [key, value] of Object.entries(config)) {
configNode.append(`${key}: ${typeof value === 'string' ? `'${value}'` : value},`, NL);
}
});
node.append('};', NL, NL);
return node;
}
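Put together, generateModule now emits per-language metadata and generated modules plus one shared module per project. A rough sketch of the output, assuming a project and grammar both named 'States' with language id 'states' (all names are placeholders):

    export const StatesLanguageMetaData: LanguageMetaData = {
        languageId: 'states',
        fileExtensions: ['.states'],
        caseInsensitive: false
    };

    export const StatesGeneratedSharedModule: Module<LangiumSharedServices, LangiumGeneratedSharedServices> = {
        AstReflection: () => new StatesAstReflection()
    };

    export const StatesGeneratedModule: Module<LangiumServices, LangiumGeneratedServices> = {
        Grammar: () => StatesGrammar(),
        LanguageMetaData: () => StatesLanguageMetaData,
        parser: {}
    };

If a chevrotainParserConfig is configured, the parser group additionally receives a ParserConfig entry pointing at either the language-specific or the project-wide parser config constant.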
function appendQuotesAndDot(input: string): string {

@@ -73,0 +118,0 @@ if (!input.startsWith('.')) {

@@ -8,4 +8,4 @@ /******************************************************************************

import * as langium from 'langium';
import { getCommentParts, isCommentTerminal, isTerminalRule } from 'langium';
import { LangiumConfig } from '../package';
import { escapeRegExp, getCaseInsensitivePattern, getTerminalParts, isCommentTerminal, isTerminalRule, terminalRegex } from 'langium';
import { LangiumLanguageConfig } from '../package';
import { collectKeywords } from './util';

@@ -50,6 +50,6 @@

export function generateTextMate(grammar: langium.Grammar, config: LangiumConfig): string {
export function generateTextMate(grammar: langium.Grammar, config: LangiumLanguageConfig): string {
const json: TextMateGrammar = {
name: config.languageId,
scopeName: `source.${config.languageId}`,
name: config.id,
scopeName: `source.${config.id}`,
fileTypes: config.fileExtensions ?? [],

@@ -63,3 +63,3 @@ patterns: getPatterns(grammar, config),

function getPatterns(grammar: langium.Grammar, config: LangiumConfig): Pattern[] {
function getPatterns(grammar: langium.Grammar, config: LangiumLanguageConfig): Pattern[] {
const patterns: Pattern[] = [];

@@ -69,20 +69,20 @@ patterns.push({

});
patterns.push(getKeywordControl(grammar, config));
patterns.push(getKeywordSymbols(grammar, config));
patterns.push(getControlKeywords(grammar, config));
patterns.push(...getStringPatterns(grammar, config));
return patterns;
}
function getRepository(grammar: langium.Grammar, config: LangiumConfig): Repository {
function getRepository(grammar: langium.Grammar, config: LangiumLanguageConfig): Repository {
const commentPatterns: Pattern[] = [];
for (const rule of grammar.rules) {
if (isTerminalRule(rule) && isCommentTerminal(rule)) {
const parts = getCommentParts(rule.regex);
const parts = getTerminalParts(terminalRegex(rule));
for (const part of parts) {
if (part.end) {
commentPatterns.push({
'name': `comment.block.${config.languageId}`,
'name': `comment.block.${config.id}`,
'begin': part.start,
'beginCaptures': {
'0': {
'name': `punctuation.definition.comment.${config.languageId}`
'name': `punctuation.definition.comment.${config.id}`
}

@@ -93,3 +93,3 @@ },

'0': {
'name': `punctuation.definition.comment.${config.languageId}`
'name': `punctuation.definition.comment.${config.id}`
}

@@ -103,7 +103,7 @@ }

'1': {
'name': `punctuation.whitespace.comment.leading.${config.languageId}`
'name': `punctuation.whitespace.comment.leading.${config.id}`
}
},
'end': '(?=$)',
'name': `comment.line.${config.languageId}`
'name': `comment.line.${config.id}`
});

@@ -123,19 +123,74 @@ }

function getKeywordControl(grammar: langium.Grammar, pack: LangiumConfig): Pattern {
const regex = /[A-Za-z]+/;
const keywords = collectKeywords(grammar).filter(kw => regex.test(kw));
function getControlKeywords(grammar: langium.Grammar, pack: LangiumLanguageConfig): Pattern {
const regex = /[A-Za-z]/;
const controlKeywords = collectKeywords(grammar).filter(kw => regex.test(kw));
const groups = groupKeywords(controlKeywords, pack.caseInsensitive);
return {
'name': `keyword.control.${pack.languageId}`,
'match': `\\b(${keywords.join('|')})\\b`
'name': `keyword.control.${pack.id}`,
'match': groups.join('|')
};
}
function getKeywordSymbols(grammar: langium.Grammar, pack: LangiumConfig): Pattern {
const regex = /\W/;
const keywordsFiltered = collectKeywords(grammar).filter(kw => regex.test(kw));
const keywords = keywordsFiltered.map(kw => `\\${kw}`);
return {
'name': `keyword.symbol.${pack.languageId}`,
'match': `(${keywords.join('|')})`
};
function groupKeywords(keywords: string[], caseInsensitive: boolean | undefined): string[] {
const groups: {
letter: string[],
leftSpecial: string[],
rightSpecial: string[],
special: string[]
} = {letter: [], leftSpecial: [], rightSpecial: [], special: []};
keywords.forEach(keyword => {
const keywordPattern = caseInsensitive ? getCaseInsensitivePattern(keyword) : escapeRegExp(keyword);
if (/\w/.test(keyword[0])) {
if (/\w/.test(keyword[keyword.length - 1])) {
groups.letter.push(keywordPattern);
} else {
groups.rightSpecial.push(keywordPattern);
}
} else {
if ((/\w/).test(keyword[keyword.length - 1])) {
groups.leftSpecial.push(keywordPattern);
} else {
groups.special.push(keywordPattern);
}
}
});
const res = [];
if (groups.letter.length) res.push(`\\b(${groups.letter.join('|')})\\b`);
if (groups.leftSpecial.length) res.push(`\\B(${groups.leftSpecial.join('|')})\\b`);
if (groups.rightSpecial.length) res.push(`\\b(${groups.rightSpecial.join('|')})\\B`);
if (groups.special.length) res.push(`\\B(${groups.special.join('|')})\\B`);
return res;
}
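The control-keyword pattern is no longer a single \b(...)\b alternation: groupKeywords splits keywords by whether they start and end with word characters, so that the boundary assertions still match keywords made of symbols. A small illustration with hypothetical keywords; the regexes below correspond to the alternatives that end up joined into the 'match' string:

    // Hypothetical keywords ['if', 'else', '@entry', '=>'], case-sensitive:
    const letterGroup = /\b(if|else)\b/;      // both ends are word characters
    const leftSpecialGroup = /\B(@entry)\b/;  // starts with a symbol
    const specialGroup = /\B(=>)\B/;          // symbols only
    console.log(letterGroup.test('else'), leftSpecialGroup.test('@entry'), specialGroup.test('a => b')); // true true true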
function getStringPatterns(grammar: langium.Grammar, pack: LangiumLanguageConfig): Pattern[] {
const terminals = langium.stream(grammar.rules).filter(langium.isTerminalRule);
const stringTerminal = terminals.find(e => e.name.toLowerCase() === 'string');
const stringPatterns: Pattern[] = [];
if (stringTerminal) {
const parts = getTerminalParts(terminalRegex(stringTerminal));
for (const part of parts) {
if (part.end) {
stringPatterns.push({
'name': `string.quoted.${delimiterName(part.start)}.${pack.id}`,
'begin': part.start,
'end': part.end
});
}
}
}
return stringPatterns;
}
function delimiterName(delimiter: string): string {
if (delimiter === "'") {
return 'single';
} else if (delimiter === '"') {
return 'double';
} else if (delimiter === '`') {
return 'backtick';
} else {
return 'delimiter';
}
}
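For a grammar with a conventional string terminal, getStringPatterns therefore yields one pattern per delimiter pair, named after the delimiter via delimiterName. A rough example of a single generated entry (the language id is a placeholder; Pattern is the interface used by this generator):

    const doubleQuotedString: Pattern = {
        name: 'string.quoted.double.states',
        begin: '"',
        end: '"'
    };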

@@ -9,6 +9,7 @@ /******************************************************************************

import * as langium from 'langium';
import { getRuleType, getTypeName, isDataTypeRule, isParserRule } from 'langium';
import { getDocument, getRuleType, getTypeName, isDataTypeRule, isParserRule, LangiumDocuments, resolveImport } from 'langium';
import { CompositeGeneratorNode, IndentNode, NL } from 'langium';
import { processGeneratorNode } from 'langium';
import { Cardinality, isOptional } from 'langium';
import { URI } from 'vscode-uri';

@@ -133,6 +134,6 @@ type TypeAlternative = {

export function collectAst(grammar: langium.Grammar): Interface[] {
export function collectAst(documents: LangiumDocuments, grammars: langium.Grammar[]): Interface[] {
const state = createState();
const parserRules = grammar.rules.filter(e => langium.isParserRule(e) && !e.fragment && !isDataTypeRule(e)).map(e => e as langium.ParserRule);
const parserRules = collectAllParserRules(documents, grammars);

@@ -151,2 +152,23 @@ const allTypes: TypeAlternative[] = [];

function collectAllParserRules(documents: LangiumDocuments, grammars: langium.Grammar[], rules: Set<langium.ParserRule> = new Set(), visited: Set<URI> = new Set()): langium.ParserRule[] {
for (const grammar of grammars) {
const doc = getDocument(grammar);
if (visited.has(doc.uri)) {
continue;
}
visited.add(doc.uri);
for (const rule of grammar.rules) {
if (langium.isParserRule(rule) && !rule.fragment && !isDataTypeRule(rule)) {
rules.add(rule);
}
}
const importedGrammars = grammar.imports.map(e => resolveImport(documents, e)!);
collectAllParserRules(documents, importedGrammars, rules, visited);
}
return Array.from(rules);
}
function createState(type?: TypeAlternative): CollectorState {

@@ -153,0 +175,0 @@ const state: CollectorState = { types: [], cardinalities: [], tree: new TypeTree() };

@@ -12,3 +12,13 @@ /******************************************************************************

import * as readline from 'readline';
import type { GenerateOptions } from '../generate';
//eslint-disable-next-line @typescript-eslint/no-explicit-any
export function log(level: 'log' | 'warn' | 'error', options: GenerateOptions, message: string, ...args: any[]): void {
if (options.watch) {
console[level](getTime() + message, ...args);
} else {
console[level](message, ...args);
}
}
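The new log helper routes generator output through one place: in watch mode every message is prefixed with the getTime() timestamp, otherwise it is printed unchanged. A minimal usage sketch (the options object here is hypothetical):

    const options = { watch: true } as GenerateOptions;
    log('log', options, 'Writing generated files to src/generated');        // prefixed with getTime()
    log('error', options, 'Failed to read config file.', new Error('ENOENT'));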
let start = process.hrtime();

@@ -97,2 +107,2 @@

export const generatedHeader = getGeneratedHeader();
export const schema = fs.readJsonSync(path.resolve(__dirname, '../../langium-config-schema.json'), { encoding: 'utf-8' });
export const schema = fs.readJson(path.resolve(__dirname, '../../langium-config-schema.json'), { encoding: 'utf-8' });

@@ -12,3 +12,3 @@ /******************************************************************************

import { generate, GenerateOptions, GeneratorResult } from './generate';
import { cliVersion, elapsedTime, getTime, schema } from './generator/util';
import { cliVersion, elapsedTime, getTime, log, schema } from './generator/util';
import { LangiumConfig, loadConfigs, RelativePath } from './package';

@@ -30,8 +30,4 @@ import path from 'path';

async function forEachConfig(options: GenerateOptions, callback: (config: LangiumConfig) => Promise<GeneratorResult>): Promise<void> {
const configs = loadConfigs(options.file);
if (!configs.length) {
console.error('Could not find a langium configuration. Please add a langium-config.json to your project or a langium section to your package.json.'.red);
process.exit(1);
}
async function forEachConfig(options: GenerateOptions, callback: (config: LangiumConfig, options: GenerateOptions) => Promise<GeneratorResult>): Promise<void> {
const configs = await loadConfigs(options);
const validation = validate(configs, schema, {

@@ -41,10 +37,11 @@ nestedErrors: true

if (!validation.valid) {
console.error('Error: Your Langium configuration is invalid.'.red);
log('error', options, 'Error: Your Langium configuration is invalid.'.red);
const errors = validation.errors.filter(error => error.path.length > 0);
errors.forEach(error => {
console.error(`--> ${error.stack}`);
log('error', options, `--> ${error.stack}`);
});
process.exit(1);
}
const allSuccessful = (await Promise.all(configs.map(callback))).every(e => e === 'success');
const results = await Promise.all(configs.map(config => callback(config, options)));
const allSuccessful = results.every(result => result === 'success');
if (options.watch) {

@@ -55,17 +52,19 @@ if (allSuccessful) {

console.log(getTime() + 'Langium generator will continue running in watch mode');
configs.forEach(e => {
const grammarPath = path.resolve(e[RelativePath], e.grammar);
fs.watchFile(grammarPath, async () => {
console.log(getTime() + 'File change detected. Starting compilation...');
elapsedTime();
if (await callback(e) === 'success') {
console.log(`${getTime()}Langium generator finished ${'successfully'.green.bold} in ${elapsedTime()}ms`);
}
});
});
for (const config of configs) {
for (const language of config.languages) {
const grammarPath = path.resolve(config[RelativePath], language.grammar);
fs.watchFile(grammarPath, async () => {
console.log(getTime() + 'File change detected. Starting compilation...');
elapsedTime();
if (await callback(config, options) === 'success') {
console.log(`${getTime()}Langium generator finished ${'successfully'.green.bold} in ${elapsedTime()}ms`);
}
});
}
}
} else if (!allSuccessful) {
process.exit(1);
} else {
console.log(`${getTime()}Langium generator finished ${'successfully'.green.bold} in ${elapsedTime()}ms`);
console.log(`Langium generator finished ${'successfully'.green.bold} in ${elapsedTime()}ms`);
}
}

@@ -8,9 +8,10 @@ /******************************************************************************

import fs from 'fs-extra';
import { IParserConfig } from 'langium';
import type { IParserConfig } from 'langium';
import path from 'path';
import { getTime } from './generator/util';
import type { GenerateOptions } from './generate';
import { log } from './generator/util';
export interface Package {
name: string,
version: string,
name: string
version: string
langium: LangiumConfig

@@ -24,4 +25,14 @@ }

[RelativePath]: string
projectName: string
languages: LangiumLanguageConfig[]
/** Main output directory for TypeScript code */
out?: string
chevrotainParserConfig?: IParserConfig,
/** The following option is meant to be used only by Langium itself */
langiumInternal?: boolean
}
export interface LangiumLanguageConfig {
/** The identifier of your language as used in vscode */
languageId: string
id: string
/** Path to the grammar file */

@@ -31,4 +42,4 @@ grammar: string

fileExtensions?: string[]
/** Main output directory for TypeScript code */
out?: string
/** Enable case-insensitive keywords parsing */
caseInsensitive?: boolean
/** Enable generating a TextMate syntax highlighting file */

@@ -40,33 +51,34 @@ textMate?: {

/** Configure the chevrotain parser */
chevrotainParserConfig?: IParserConfig,
/** The following option is meant to be used only by Langium itself */
langiumInternal?: boolean
chevrotainParserConfig?: IParserConfig
}
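Taken together, these interfaces describe the new split between project-wide options (projectName, out, a default chevrotainParserConfig) and per-language options (id, grammar, fileExtensions, caseInsensitive, textMate, an optional per-language parser config). A minimal example of a matching langium-config.json, written here as a TypeScript literal with placeholder values:

    const exampleConfig = {
        projectName: 'States',
        out: 'src/generated',
        chevrotainParserConfig: { maxLookahead: 3 },
        languages: [{
            id: 'states',
            grammar: 'src/states.langium',
            fileExtensions: ['.states'],
            caseInsensitive: false,
            textMate: { out: 'syntaxes/states.tmLanguage.json' }
        }]
    };
    // The RelativePath entry is attached by loadConfigs at runtime and is not
    // part of the file on disk.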
export function loadConfigs(file: string | undefined): LangiumConfig[] {
let defaultPath = './langium-config.json';
if (!fs.existsSync(defaultPath)) {
defaultPath = './package.json';
export async function loadConfigs(options: GenerateOptions): Promise<LangiumConfig[]> {
let filePath: string;
if (options.file) {
filePath = path.normalize(options.file);
} else {
let defaultFile = 'langium-config.json';
if (!fs.existsSync(defaultFile)) {
defaultFile = 'package.json';
}
filePath = path.normalize(defaultFile);
}
const filePath = path.normalize(file ?? defaultPath);
const relativePath = path.dirname(filePath);
console.log(`${getTime()}Reading config from ${filePath.white.bold}`);
let obj;
log('log', options, `Reading config from ${filePath.white.bold}`);
try {
obj = fs.readJsonSync(filePath, { encoding: 'utf-8' });
} catch (e) {
console.error(getTime() + 'Failed to read config file.', e);
const obj = await fs.readJson(filePath, { encoding: 'utf-8' });
const config: LangiumConfig | LangiumConfig[] = path.basename(filePath) === 'package.json' ? obj.langium : obj;
if (Array.isArray(config)) {
config.forEach(c => {
c[RelativePath] = relativePath;
});
return config;
} else {
config[RelativePath] = relativePath;
}
return [config];
} catch (err) {
log('error', options, 'Failed to read config file.'.red, err);
process.exit(1);
}
if (Array.isArray(obj)) { // We have an array of configs in our 'langium-config.json'
return obj.map(e => {
e[RelativePath] = relativePath;
return e;
});
} else if (!('name' in obj)) { // We have a single config in our 'langium-config.json'
obj[RelativePath] = relativePath;
return [obj];
} else { // Invalid data
return [];
}
}

@@ -8,4 +8,5 @@ /******************************************************************************

import {
createDefaultModule, createLangiumParser, Grammar, inject, IParserConfig, LangiumGeneratedServices,
LangiumServices, Module
createDefaultModule, createDefaultSharedModule, createLangiumParser, Grammar, inject,
IParserConfig, LangiumGeneratedServices, LangiumGeneratedSharedServices, LangiumServices,
LangiumSharedServices, Module
} from 'langium';

@@ -21,5 +22,7 @@ import { LangiumConfig } from './package';

const unavailable: () => any = () => ({});
const generatedSharedModule: Module<LangiumSharedServices, LangiumGeneratedSharedServices> = {
AstReflection: unavailable,
};
const generatedModule: Module<LangiumServices, LangiumGeneratedServices> = {
Grammar: () => grammar,
AstReflection: unavailable,
LanguageMetaData: unavailable,

@@ -30,3 +33,4 @@ parser: {

};
const services = inject(createDefaultModule({}), generatedModule);
const shared = inject(createDefaultSharedModule(), generatedSharedModule);
const services = inject(createDefaultModule({ shared }), generatedModule);
try {

@@ -33,0 +37,0 @@ createLangiumParser(services);

