New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@codingame/monaco-vscode-textmate-service-override

Package Overview
Dependencies
Maintainers
6
Versions
139
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@codingame/monaco-vscode-textmate-service-override - npm Package Compare versions

Comparing version 4.5.1 to 4.5.2

6

package.json
{
"name": "@codingame/monaco-vscode-textmate-service-override",
"version": "4.5.1",
"version": "4.5.2",
"keywords": [],

@@ -32,7 +32,7 @@ "author": {

"dependencies": {
"vscode": "npm:@codingame/monaco-vscode-api@4.5.1",
"vscode": "npm:@codingame/monaco-vscode-api@4.5.2",
"vscode-oniguruma": "1.7.0",
"vscode-textmate": "9.0.0",
"@codingame/monaco-vscode-files-service-override": "4.5.1"
"@codingame/monaco-vscode-files-service-override": "4.5.2"
}
}

@@ -23,2 +23,3 @@ import { __decorate, __param } from 'vscode/external/tslib/tslib.es6.js';

var InspectEditorTokensController_1;
const _moduleId = "vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens";
const $ = $$1;

@@ -59,3 +60,3 @@ let InspectEditorTokensController = class InspectEditorTokensController extends Disposable {

}
this._widget = ( new InspectEditorTokensWidget(
this._widget = ( (new InspectEditorTokensWidget(
this._editor,

@@ -68,3 +69,3 @@ this._textMateService,

this._languageFeaturesService
));
)));
}

@@ -86,10 +87,10 @@ stop() {

};
InspectEditorTokensController = InspectEditorTokensController_1 = ( __decorate([
( __param(1, ITextMateTokenizationService)),
( __param(2, ILanguageService)),
( __param(3, IWorkbenchThemeService)),
( __param(4, INotificationService)),
( __param(5, IConfigurationService)),
( __param(6, ILanguageFeaturesService))
], InspectEditorTokensController));
InspectEditorTokensController = InspectEditorTokensController_1 = ( (__decorate([
( (__param(1, ITextMateTokenizationService))),
( (__param(2, ILanguageService))),
( (__param(3, IWorkbenchThemeService))),
( (__param(4, INotificationService))),
( (__param(5, IConfigurationService))),
( (__param(6, ILanguageFeaturesService)))
], InspectEditorTokensController)));
class InspectEditorTokens extends EditorAction {

@@ -99,7 +100,3 @@ constructor() {

id: 'editor.action.inspectTMScopes',
label: ( localizeWithPath(
'vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens',
'inspectEditorTokens',
"Developer: Inspect Editor Tokens and Scopes"
)),
label: ( localizeWithPath(_moduleId, 0, "Developer: Inspect Editor Tokens and Scopes")),
alias: 'Developer: Inspect Editor Tokens and Scopes',

@@ -150,3 +147,3 @@ precondition: undefined

this._domNode.className = 'token-inspect-widget';
this._currentRequestCancellationTokenSource = ( new CancellationTokenSource());
this._currentRequestCancellationTokenSource = ( (new CancellationTokenSource()));
this._beginCompute(this._editor.getPosition());

@@ -171,7 +168,3 @@ this._register(this._editor.onDidChangeCursorPosition((e) => this._beginCompute(this._editor.getPosition())));

clearNode(this._domNode);
this._domNode.appendChild(document.createTextNode(( localizeWithPath(
'vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens',
'inspectTMScopesWidget.loading',
"Loading..."
))));
this._domNode.appendChild(document.createTextNode(( localizeWithPath(_moduleId, 1, "Loading..."))));
Promise.all([grammar, semanticTokens]).then(([grammar, semanticTokens]) => {

@@ -223,3 +216,3 @@ if (this._isDisposed) {

const propertiesByDefValue = {};
const allDefValues = ( new Array());
const allDefValues = ( (new Array()));
for (const property of properties) {

@@ -229,3 +222,3 @@ if (semanticTokenInfo.metadata[property] !== undefined) {

const defValue = this._renderTokenStyleDefinition(definition, property);
const defValueStr = ( defValue.map(el => el instanceof HTMLElement ? el.outerHTML : el)).join();
const defValueStr = ( (defValue.map(el => el instanceof HTMLElement ? el.outerHTML : el))).join();
let properties = propertiesByDefValue[defValueStr];

@@ -252,3 +245,3 @@ if (!properties) {

}
const scopes = ( new Array());
const scopes = ( (new Array()));
for (let i = textMateTokenInfo.token.scopes.length - 1; i >= 0; i--) {

@@ -278,3 +271,3 @@ scopes.push(textMateTokenInfo.token.scopes[i]);

_formatMetadata(semantic, tm) {
const elements = ( new Array());
const elements = ( (new Array()));
function render(property) {

@@ -291,3 +284,3 @@ const value = semantic?.[property] || tm?.[property];

if (foreground && background) {
const backgroundColor = ( Color.fromHex(background)), foregroundColor = ( Color.fromHex(foreground));
const backgroundColor = ( (Color.fromHex(background))), foregroundColor = ( (Color.fromHex(foreground)));
if (backgroundColor.isOpaque()) {

@@ -300,3 +293,3 @@ elements.push($('tr', undefined, $('td.tiw-metadata-key', undefined, 'contrast ratio'), $('td.tiw-metadata-value', undefined, backgroundColor.getContrastRatio(foregroundColor.makeOpaque(backgroundColor)).toFixed(2))));

}
const fontStyleLabels = ( new Array());
const fontStyleLabels = ( (new Array()));
function addStyle(key) {

@@ -405,3 +398,3 @@ let label;

const lineNumber = position.lineNumber;
const range = ( new Range(lineNumber, 1, lineNumber, this._model.getLineMaxColumn(lineNumber)));
const range = ( (new Range(lineNumber, 1, lineNumber, this._model.getLineMaxColumn(lineNumber))));
const tokens = await Promise.resolve(provider.provideDocumentRangeSemanticTokens(this._model, range, this._currentRequestCancellationTokenSource.token));

@@ -437,3 +430,3 @@ if (this.isSemanticTokens(tokens)) {

}
const range = ( new Range(line + 1, character + 1, line + 1, character + 1 + len));
const range = ( (new Range(line + 1, character + 1, line + 1, character + 1 + len)));
const definitions = {};

@@ -464,3 +457,3 @@ const colorMap = this._themeService.getColorTheme().tokenColorMap;

_renderTokenStyleDefinition(definition, property) {
const elements = ( new Array());
const elements = ( (new Array()));
if (definition === undefined) {

@@ -467,0 +460,0 @@ return elements;

@@ -37,2 +37,3 @@ import { __decorate, __param } from 'vscode/external/tslib/tslib.es6.js';

var TextMateTokenizationFeature_1;
const _moduleId = "vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl";
let TextMateTokenizationFeature = class TextMateTokenizationFeature extends Disposable {

@@ -59,3 +60,3 @@ static { TextMateTokenizationFeature_1 = this; }

this._grammarFactory = null;
this._tokenizersRegistrations = ( new DisposableStore());
this._tokenizersRegistrations = ( (new DisposableStore()));
this._currentTheme = null;

@@ -97,3 +98,3 @@ this._currentTokenColorMap = null;

if (validatedGrammar.language) {
const lazyTokenizationSupport = ( new LazyTokenizationSupport(() => this._createTokenizationSupport(validatedGrammar.language)));
const lazyTokenizationSupport = ( (new LazyTokenizationSupport(() => this._createTokenizationSupport(validatedGrammar.language))));
this._tokenizersRegistrations.add(lazyTokenizationSupport);

@@ -117,3 +118,3 @@ this._tokenizersRegistrations.add(TokenizationRegistry.registerFactory(validatedGrammar.language, lazyTokenizationSupport));

if (grammar.embeddedLanguages) {
const scopes = ( Object.keys(grammar.embeddedLanguages));
const scopes = ( (Object.keys(grammar.embeddedLanguages)));
for (let i = 0, len = scopes.length; i < len; i++) {

@@ -132,3 +133,3 @@ const scope = scopes[i];

if (grammar.tokenTypes) {
const scopes = ( Object.keys(grammar.tokenTypes));
const scopes = ( (Object.keys(grammar.tokenTypes)));
for (const scope of scopes) {

@@ -173,7 +174,3 @@ const tokenType = grammar.tokenTypes[scope];

if (this._debugMode) {
this._notificationService.error(( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'alreadyDebugging',
"Already Logging."
)));
this._notificationService.error(( localizeWithPath(_moduleId, 0, "Already Logging.")));
return;

@@ -186,12 +183,8 @@ }

location: 15 ,
buttons: [( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'stop',
"Stop"
))]
buttons: [( localizeWithPath(_moduleId, 1, "Stop"))]
}, (progress) => {
progress.report({
message: ( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'progress1',
_moduleId,
2,
"Preparing to log TM Grammar parsing. Press Stop when finished."

@@ -203,9 +196,7 @@ ))

progress.report({
message: ( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'progress2',
"Now logging TM Grammar parsing. Press Stop when finished."
))
message: ( localizeWithPath(_moduleId, 3, "Now logging TM Grammar parsing. Press Stop when finished."))
});
return ( new Promise((resolve, reject) => { }));
return (
(new Promise((resolve, reject) => { }))
);
});

@@ -237,7 +228,7 @@ }, (choice) => {

}
this._grammarFactory = ( new TMGrammarFactory({
this._grammarFactory = ( (new TMGrammarFactory({
logTrace: (msg) => this._logService.trace(msg),
logError: (msg, err) => this._logService.error(msg, err),
readFile: (resource) => this._extensionResourceLoaderService.readExtensionResource(resource)
}, this._grammarDefinitions || [], vscodeTextmate, onigLib));
}, this._grammarDefinitions || [], vscodeTextmate, onigLib)));
this._updateTheme(this._themeService.getColorTheme(), true);

@@ -255,3 +246,3 @@ return this._grammarFactory;

const grammarFactory = await this._getOrCreateGrammarFactory();
if (!( grammarFactory.has(languageId))) {
if (!( (grammarFactory.has(languageId)))) {
return null;

@@ -265,3 +256,3 @@ }

const maxTokenizationLineLength = observableConfigValue('editor.maxTokenizationLineLength', languageId, -1, this._configurationService);
const tokenization = ( new TextMateTokenizationSupport(
const tokenization = ( (new TextMateTokenizationSupport(
r.grammar,

@@ -276,3 +267,3 @@ r.initialState,

true
));
)));
tokenization.onDidEncounterLanguage((encodedLanguageId) => {

@@ -285,3 +276,5 @@ if (!this._encounteredLanguages[encodedLanguageId]) {

});
return ( new TokenizationSupportWithLineLimit(encodedLanguageId, tokenization, maxTokenizationLineLength));
return (
(new TokenizationSupportWithLineLimit(encodedLanguageId, tokenization, maxTokenizationLineLength))
);
}

@@ -317,3 +310,3 @@ catch (err) {

const grammarFactory = await this._getOrCreateGrammarFactory();
if (!( grammarFactory.has(languageId))) {
if (!( (grammarFactory.has(languageId)))) {
return null;

@@ -342,3 +335,3 @@ }

if (isWeb) {
const response = await fetch(( ( FileAccess.asBrowserUri('vscode-oniguruma/../onig.wasm')).toString(true)));
const response = await fetch(( (( (FileAccess.asBrowserUri('vscode-oniguruma/../onig.wasm'))).toString(true))));
return await response.arrayBuffer();

@@ -348,4 +341,4 @@ }

const response = await fetch(this._environmentService.isBuilt
? ( ( FileAccess.asBrowserUri(`${nodeModulesAsarUnpackedPath}/vscode-oniguruma/release/onig.wasm`)).toString(true))
: ( ( FileAccess.asBrowserUri(`${nodeModulesPath}/vscode-oniguruma/release/onig.wasm`)).toString(true)));
? ( (( (FileAccess.asBrowserUri(`${nodeModulesAsarUnpackedPath}/vscode-oniguruma/release/onig.wasm`))).toString(true)))
: ( (( (FileAccess.asBrowserUri(`${nodeModulesPath}/vscode-oniguruma/release/onig.wasm`))).toString(true))));
return response;

@@ -376,18 +369,18 @@ }

};
TextMateTokenizationFeature = TextMateTokenizationFeature_1 = ( __decorate([
( __param(0, ILanguageService)),
( __param(1, IWorkbenchThemeService)),
( __param(2, IExtensionResourceLoaderService)),
( __param(3, INotificationService)),
( __param(4, ILogService)),
( __param(5, IConfigurationService)),
( __param(6, IProgressService)),
( __param(7, IWorkbenchEnvironmentService)),
( __param(8, IInstantiationService)),
( __param(9, ITelemetryService))
], TextMateTokenizationFeature));
TextMateTokenizationFeature = TextMateTokenizationFeature_1 = ( (__decorate([
( (__param(0, ILanguageService))),
( (__param(1, IWorkbenchThemeService))),
( (__param(2, IExtensionResourceLoaderService))),
( (__param(3, INotificationService))),
( (__param(4, ILogService))),
( (__param(5, IConfigurationService))),
( (__param(6, IProgressService))),
( (__param(7, IWorkbenchEnvironmentService))),
( (__param(8, IInstantiationService))),
( (__param(9, ITelemetryService)))
], TextMateTokenizationFeature)));
function toColorMap(colorMap) {
const result = [null];
for (let i = 1, len = colorMap.length; i < len; i++) {
result[i] = ( Color.fromHex(colorMap[i]));
result[i] = ( (Color.fromHex(colorMap[i])));
}

@@ -422,4 +415,4 @@ return result;

collector.error(( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'invalid.language',
_moduleId,
4,
"Unknown language in `contributes.{0}.language`. Provided value: {1}",

@@ -433,4 +426,4 @@ grammarsExtPoint.name,

collector.error(( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'invalid.scopeName',
_moduleId,
5,
"Expected string in `contributes.{0}.scopeName`. Provided value: {1}",

@@ -444,4 +437,4 @@ grammarsExtPoint.name,

collector.error(( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'invalid.path.0',
_moduleId,
6,
"Expected string in `contributes.{0}.path`. Provided value: {1}",

@@ -453,6 +446,6 @@ grammarsExtPoint.name,

}
if (syntax.injectTo && (!Array.isArray(syntax.injectTo) || ( syntax.injectTo.some(scope => typeof scope !== 'string')))) {
if (syntax.injectTo && (!Array.isArray(syntax.injectTo) || ( (syntax.injectTo.some(scope => typeof scope !== 'string'))))) {
collector.error(( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'invalid.injectTo',
_moduleId,
7,
"Invalid value in `contributes.{0}.injectTo`. Must be an array of language scope names. Provided value: {1}",

@@ -466,4 +459,4 @@ grammarsExtPoint.name,

collector.error(( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'invalid.embeddedLanguages',
_moduleId,
8,
"Invalid value in `contributes.{0}.embeddedLanguages`. Must be an object map from scope name to language. Provided value: {1}",

@@ -477,4 +470,4 @@ grammarsExtPoint.name,

collector.error(( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'invalid.tokenTypes',
_moduleId,
9,
"Invalid value in `contributes.{0}.tokenTypes`. Must be an object map from scope name to token type. Provided value: {1}",

@@ -489,4 +482,4 @@ grammarsExtPoint.name,

collector.warn(( localizeWithPath(
'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl',
'invalid.path.1',
_moduleId,
10,
"Expected `contributes.{0}.path` ({1}) to be included inside extension's folder ({2}). This might make the extension non-portable.",

@@ -493,0 +486,0 @@ grammarsExtPoint.name,

@@ -28,3 +28,3 @@ import { Emitter } from 'vscode/vscode/vs/base/common/event';

tokenize(line, hasEOL, state) {
throw new Error('Not supported!');
throw ( new Error('Not supported!'));
}

@@ -31,0 +31,0 @@ createBackgroundTokenizer(textModel, store) {

@@ -23,3 +23,3 @@ import { nullTokenizeEncoded } from 'vscode/vscode/vs/editor/common/languages/nullTokenize';

tokenize(line, hasEOL, state) {
throw new Error('Not supported!');
throw ( new Error('Not supported!'));
}

@@ -26,0 +26,0 @@ tokenizeEncoded(line, hasEOL, state) {

@@ -79,7 +79,7 @@ import { Disposable } from 'vscode/vscode/vs/base/common/lifecycle';

if (typeof scopeName !== 'string') {
throw new Error(missingTMGrammarErrorMessage);
throw ( new Error(missingTMGrammarErrorMessage));
}
const grammarDefinition = this._scopeRegistry.getGrammarDefinition(scopeName);
if (!grammarDefinition) {
throw new Error(missingTMGrammarErrorMessage);
throw ( new Error(missingTMGrammarErrorMessage));
}

@@ -107,3 +107,3 @@ const embeddedLanguages = grammarDefinition.embeddedLanguages;

if (err.message && err.message.startsWith('No grammar provided for')) {
throw new Error(missingTMGrammarErrorMessage);
throw ( new Error(missingTMGrammarErrorMessage));
}

@@ -110,0 +110,0 @@ throw err;

@@ -8,2 +8,3 @@ import { __decorate, __param } from 'vscode/external/tslib/tslib.es6.js';

const _moduleId = "vs/workbench/services/themes/common/tokenClassificationExtensionPoint";
const tokenClassificationRegistry = getTokenClassificationRegistry();

@@ -13,7 +14,3 @@ const tokenTypeExtPoint = ExtensionsRegistry.registerExtensionPoint({

jsonSchema: {
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenTypes',
'Contributes semantic token types.'
)),
description: ( localizeWithPath(_moduleId, 0, 'Contributes semantic token types.')),
type: 'array',

@@ -25,11 +22,7 @@ items: {

type: 'string',
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenTypes.id',
'The identifier of the semantic token type'
)),
description: ( localizeWithPath(_moduleId, 1, 'The identifier of the semantic token type')),
pattern: typeAndModifierIdPattern,
patternErrorMessage: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenTypes.id.format',
_moduleId,
2,
'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'

@@ -40,11 +33,7 @@ )),

type: 'string',
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenTypes.superType',
'The super type of the semantic token type'
)),
description: ( localizeWithPath(_moduleId, 3, 'The super type of the semantic token type')),
pattern: typeAndModifierIdPattern,
patternErrorMessage: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenTypes.superType.format',
_moduleId,
4,
'Super types should be in the form letterOrDigit[_-letterOrDigit]*'

@@ -55,7 +44,3 @@ )),

type: 'string',
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.color.description',
'The description of the semantic token type'
)),
description: ( localizeWithPath(_moduleId, 5, 'The description of the semantic token type')),
}

@@ -69,7 +54,3 @@ }

jsonSchema: {
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenModifiers',
'Contributes semantic token modifiers.'
)),
description: ( localizeWithPath(_moduleId, 6, 'Contributes semantic token modifiers.')),
type: 'array',

@@ -81,11 +62,7 @@ items: {

type: 'string',
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenModifiers.id',
'The identifier of the semantic token modifier'
)),
description: ( localizeWithPath(_moduleId, 7, 'The identifier of the semantic token modifier')),
pattern: typeAndModifierIdPattern,
patternErrorMessage: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenModifiers.id.format',
_moduleId,
8,
'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'

@@ -96,7 +73,3 @@ ))

type: 'string',
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenModifiers.description',
'The description of the semantic token modifier'
))
description: ( localizeWithPath(_moduleId, 9, 'The description of the semantic token modifier'))
}

@@ -110,7 +83,3 @@ }

jsonSchema: {
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenScopes',
'Contributes semantic token scope maps.'
)),
description: ( localizeWithPath(_moduleId, 10, 'Contributes semantic token scope maps.')),
type: 'array',

@@ -121,7 +90,3 @@ items: {

language: {
description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenScopes.languages',
'Lists the languge for which the defaults are.'
)),
description: ( localizeWithPath(_moduleId, 11, 'Lists the languge for which the defaults are.')),
type: 'string'

@@ -131,4 +96,4 @@ },

description: ( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'contributes.semanticTokenScopes.scopes',
_moduleId,
12,
'Maps a semantic token (described by semantic token selector) to one or more textMate scopes used to represent that token.'

@@ -153,4 +118,4 @@ )),

collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.id',
_moduleId,
13,
"'configuration.{0}.id' must be defined and can not be empty",

@@ -163,4 +128,4 @@ extensionPoint

collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.id.format',
_moduleId,
14,
"'configuration.{0}.id' must follow the pattern letterOrDigit[-_letterOrDigit]*",

@@ -174,4 +139,4 @@ extensionPoint

collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.superType.format',
_moduleId,
15,
"'configuration.{0}.superType' must follow the pattern letterOrDigit[-_letterOrDigit]*",

@@ -184,4 +149,4 @@ extensionPoint

collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.description',
_moduleId,
16,
"'configuration.{0}.description' must be defined and can not be empty",

@@ -199,7 +164,3 @@ extensionPoint

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.semanticTokenTypeConfiguration',
"'configuration.semanticTokenType' must be an array"
)));
collector.error(( localizeWithPath(_moduleId, 17, "'configuration.semanticTokenType' must be an array")));
return;

@@ -225,7 +186,3 @@ }

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.semanticTokenModifierConfiguration',
"'configuration.semanticTokenModifier' must be an array"
)));
collector.error(( localizeWithPath(_moduleId, 18, "'configuration.semanticTokenModifier' must be an array")));
return;

@@ -251,7 +208,3 @@ }

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.semanticTokenScopes.configuration',
"'configuration.semanticTokenScopes' must be an array"
)));
collector.error(( localizeWithPath(_moduleId, 19, "'configuration.semanticTokenScopes' must be an array")));
return;

@@ -262,4 +215,4 @@ }

collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.semanticTokenScopes.language',
_moduleId,
20,
"'configuration.semanticTokenScopes.language' must be a string"

@@ -271,4 +224,4 @@ )));

collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.semanticTokenScopes.scopes',
_moduleId,
21,
"'configuration.semanticTokenScopes.scopes' must be defined as an object"

@@ -280,6 +233,6 @@ )));

const tmScopes = contribution.scopes[selectorString];
if (!Array.isArray(tmScopes) || ( tmScopes.some(l => typeof l !== 'string'))) {
if (!Array.isArray(tmScopes) || ( (tmScopes.some(l => typeof l !== 'string')))) {
collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.semanticTokenScopes.scopes.value',
_moduleId,
22,
"'configuration.semanticTokenScopes.scopes' values must be an array of strings"

@@ -291,8 +244,8 @@ )));

const selector = tokenClassificationRegistry.parseTokenSelector(selectorString, contribution.language);
tokenClassificationRegistry.registerTokenStyleDefault(selector, { scopesToProbe: ( tmScopes.map(s => s.split(' '))) });
tokenClassificationRegistry.registerTokenStyleDefault(selector, { scopesToProbe: ( (tmScopes.map(s => s.split(' ')))) });
}
catch (e) {
collector.error(( localizeWithPath(
'vs/workbench/services/themes/common/tokenClassificationExtensionPoint',
'invalid.semanticTokenScopes.scopes.selector',
_moduleId,
23,
"configuration.semanticTokenScopes.scopes': Problems parsing selector {0}.",

@@ -312,3 +265,3 @@ selectorString

const selector = tokenClassificationRegistry.parseTokenSelector(selectorString, contribution.language);
tokenClassificationRegistry.registerTokenStyleDefault(selector, { scopesToProbe: ( tmScopes.map(s => s.split(' '))) });
tokenClassificationRegistry.registerTokenStyleDefault(selector, { scopesToProbe: ( (tmScopes.map(s => s.split(' ')))) });
}

@@ -330,7 +283,7 @@ catch (e) {

};
TokenClassificationExtensionPointWorkbenchContribution = ( __decorate([
( __param(0, IInstantiationService))
], TokenClassificationExtensionPointWorkbenchContribution));
TokenClassificationExtensionPointWorkbenchContribution = ( (__decorate([
( (__param(0, IInstantiationService)))
], TokenClassificationExtensionPointWorkbenchContribution)));
registerWorkbenchContribution2(TokenClassificationExtensionPointWorkbenchContribution.ID, TokenClassificationExtensionPointWorkbenchContribution, 1 );
export { TokenClassificationExtensionPoints };
Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc