@codingame/monaco-vscode-textmate-service-override - npm package version comparison

Comparing version 9.0.3 to 10.0.0


package.json
{
"name": "@codingame/monaco-vscode-textmate-service-override",
"version": "9.0.3",
"version": "10.0.0",
"keywords": [],

@@ -32,7 +32,7 @@ "author": {

"dependencies": {
"vscode": "npm:@codingame/monaco-vscode-api@9.0.3",
"vscode": "npm:@codingame/monaco-vscode-api@10.0.0",
"vscode-oniguruma": "1.7.0",
"vscode-textmate": "9.1.0",
"@codingame/monaco-vscode-files-service-override": "9.0.3"
"@codingame/monaco-vscode-files-service-override": "10.0.0"
}
}
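
The package.json changes are just the major version bump: the "vscode" alias (npm:@codingame/monaco-vscode-api) and @codingame/monaco-vscode-files-service-override move from 9.0.3 to 10.0.0 in lockstep, while vscode-oniguruma and vscode-textmate stay pinned. As a minimal sketch of how this override is normally wired into an application (assuming the usual monaco-vscode-api setup; the initialize entry point and the name given to the default export are conventions, not taken from this diff):

// Illustrative sketch only. All service-override packages must resolve to the
// same @codingame/monaco-vscode-api version, which is why both dependencies
// are bumped together.
import { initialize } from 'vscode/services';
import getTextmateServiceOverride from '@codingame/monaco-vscode-textmate-service-override';

await initialize({
  // Registers ITextMateTokenizationService and related services before the editor is created.
  ...getTextmateServiceOverride()
});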

@@ -11,3 +11,3 @@ import { StandaloneServices } from 'vscode/vscode/vs/editor/standalone/browser/standaloneServices';

import './vscode/src/vs/workbench/services/themes/common/tokenClassificationExtensionPoint.js';
import './vscode/src/vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens.js';
import 'vscode/vscode/vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens';

@@ -20,3 +20,6 @@ const _onigWasm = new URL('vscode-oniguruma/release/onig.wasm', import.meta.url).href;

registerServiceInitializeParticipant(async (accessor) => {
void accessor.get(ILifecycleService).when(2).then(() => {
void accessor
.get(ILifecycleService)
.when(2)
.then(() => {
StandaloneServices.get(ITextMateTokenizationService);

@@ -23,0 +26,0 @@ });
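
The only change in this hunk is formatting: the chained call on ILifecycleService is split across several lines. The bare 2 passed to when() is an inlined const-enum value; in VS Code's lifecycle service the phases are numbered Starting = 1, Ready = 2, Restored = 3, Eventually = 4, so the participant waits for the Ready phase and then touches ITextMateTokenizationService to force the tokenizer to be created eagerly. A reference sketch of the assumed mapping:

// Assumed values of VS Code's LifecyclePhase const enum (the literal 2 above):
const LifecyclePhase = { Starting: 1, Ready: 2, Restored: 3, Eventually: 4 };
// The participant effectively resolves lifecycleService.when(LifecyclePhase.Ready)
// and then calls StandaloneServices.get(ITextMateTokenizationService).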

@@ -5,4 +5,2 @@ import { isESM } from 'vscode/vscode/vs/base/common/amd';

import { assertType } from 'vscode/vscode/vs/base/common/types';
import 'vscode/vscode/vs/base/common/charCode';
import 'vscode/vscode/vs/base/common/marshallingIds';
import 'vscode/vscode/vs/base/common/path';

@@ -9,0 +7,0 @@

import { Disposable } from 'vscode/vscode/vs/base/common/lifecycle';
import 'vscode/vscode/vs/base/common/arrays';
import 'vscode/vscode/vs/base/common/event';
import { autorun } from 'vscode/vscode/vs/base/common/observableInternal/autorun';
import 'vscode/vscode/vs/base/common/observableInternal/derived';
import { autorun } from 'vscode/vscode/vs/base/common/observableInternal/autorun';
import 'vscode/vscode/vs/base/common/cancellation';
import { keepObserved } from 'vscode/vscode/vs/base/common/observableInternal/utils';
import 'vscode/vscode/vs/base/common/cancellation';
import { countEOL } from 'vscode/vscode/vs/editor/common/core/eolCounter';

@@ -14,3 +15,2 @@ import { LineRange } from 'vscode/vscode/vs/editor/common/core/lineRange';

import { MonotonousIndexTransformer, ArrayEdit, SingleArrayEdit } from '../arrayOperation.js';
import { applyStateStackDiff, INITIAL } from 'vscode-textmate';

@@ -118,2 +118,3 @@ class TextMateWorkerTokenizerController extends Disposable {

if (!this._applyStateStackDiffFn || !this._initialState) {
const { applyStateStackDiff, INITIAL } = await import('vscode-textmate').then(module => module.default ?? module);
this._applyStateStackDiffFn = applyStateStackDiff;

@@ -120,0 +121,0 @@ this._initialState = INITIAL;
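
The static import of applyStateStackDiff and INITIAL from vscode-textmate (removed above) is replaced by a lazy dynamic import: the controller only loads vscode-textmate the first time it needs to apply a state-stack diff and caches the resolved values on the instance. The module.default ?? module fallback keeps this working whether the dependency resolves as an ES module or through CommonJS interop; the same treatment is applied to diffStateStacksRefEq in the worker tokenizer further down. A compact sketch of the caching pattern (the field names mirror the ones in the diff):

// Sketch: load vscode-textmate on first use and cache its exports.
async function ensureTextmate(self) {
  if (!self._applyStateStackDiffFn || !self._initialState) {
    const textmate = await import('vscode-textmate').then((m) => m.default ?? m);
    self._applyStateStackDiffFn = textmate.applyStateStackDiff;
    self._initialState = textmate.INITIAL;
  }
}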

import { __decorate, __param } from 'vscode/external/tslib/tslib.es6.js';
import 'vscode/vscode/vs/base/common/amd';
import { DisposableStore, toDisposable } from 'vscode/vscode/vs/base/common/lifecycle';

@@ -4,0 +3,0 @@ import { nodeModulesPath, FileAccess } from 'vscode/vscode/vs/base/common/network';

@@ -5,4 +5,2 @@ import { URI } from 'vscode/vscode/vs/base/common/uri';

import { TextMateWorkerHost } from './textMateWorkerHost.js';
import * as vscodeTextmate from 'vscode-textmate';
import * as vscodeOniguruma from 'vscode-oniguruma';

@@ -36,2 +34,4 @@ function create(workerServer) {

async _loadTMGrammarFactory(grammarDefinitions, onigurumaWASMUri) {
const vscodeTextmate = await import('vscode-textmate').then(module => module.default ?? module);
const vscodeOniguruma = await import('vscode-oniguruma').then(module => module.default ?? module);
const response = await fetch(onigurumaWASMUri);

@@ -38,0 +38,0 @@ const bytes = await response.arrayBuffer();
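
In the worker, the top-level imports of vscode-textmate and vscode-oniguruma (removed above) move into _loadTMGrammarFactory, so both modules are loaded on demand right before the Oniguruma WASM binary is fetched. A rough sketch of what such a factory ends up doing with the public vscode-textmate and vscode-oniguruma APIs (the grammar-loading callback is a placeholder, not part of this diff):

// Sketch: build a TextMate Registry backed by the Oniguruma WASM engine.
async function createGrammarRegistry(onigurumaWASMUri, loadGrammarSource) {
  const vscodeTextmate = await import('vscode-textmate').then((m) => m.default ?? m);
  const vscodeOniguruma = await import('vscode-oniguruma').then((m) => m.default ?? m);

  const response = await fetch(onigurumaWASMUri);
  await vscodeOniguruma.loadWASM(await response.arrayBuffer());

  return new vscodeTextmate.Registry({
    onigLib: Promise.resolve({
      createOnigScanner: (sources) => vscodeOniguruma.createOnigScanner(sources),
      createOnigString: (str) => vscodeOniguruma.createOnigString(str)
    }),
    // Placeholder: must resolve a scope name to a parsed grammar (IRawGrammar) or null.
    loadGrammar: (scopeName) => loadGrammarSource(scopeName)
  });
}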

import { RunOnceScheduler } from 'vscode/vscode/vs/base/common/async';
import { observableValue } from 'vscode/vscode/vs/base/common/observableInternal/base';
import 'vscode/vscode/vs/base/common/arrays';
import 'vscode/vscode/vs/base/common/event';
import { Disposable } from 'vscode/vscode/vs/base/common/lifecycle';
import 'vscode/vscode/vs/base/common/observableInternal/autorun';
import 'vscode/vscode/vs/base/common/observableInternal/derived';
import 'vscode/vscode/vs/base/common/observableInternal/autorun';
import 'vscode/vscode/vs/base/common/cancellation';
import 'vscode/vscode/vs/base/common/observableInternal/utils';
import 'vscode/vscode/vs/base/common/cancellation';
import 'vscode/vscode/vs/base/common/arrays';
import { setTimeout0 } from 'vscode/vscode/vs/base/common/platform';

@@ -16,4 +18,2 @@ import { LineRange } from 'vscode/vscode/vs/editor/common/core/lineRange';

import { TokenizationSupportWithLineLimit } from '../../tokenizationSupport/tokenizationSupportWithLineLimit.js';
import { diffStateStacksRefEq } from 'vscode-textmate';
import { Disposable } from 'vscode/vscode/vs/base/common/lifecycle';

@@ -88,2 +88,3 @@ class TextMateWorkerTokenizer extends MirrorTextModel {

if (!this._diffStateStacksRefEqFn) {
const { diffStateStacksRefEq } = await import('vscode-textmate').then(module => module.default ?? module);
this._diffStateStacksRefEqFn = diffStateStacksRefEq;

@@ -90,0 +91,0 @@ }

import { __decorate, __param } from 'vscode/external/tslib/tslib.es6.js';
import { resolveAmdNodeModulePath } from '../../../../amdX.js';
import 'vscode/vscode/vs/base/common/amd';
import { createStyleSheet } from 'vscode/vscode/vs/base/browser/dom';

@@ -10,10 +9,10 @@ import { equals } from 'vscode/vscode/vs/base/common/arrays';

import { FileAccess, nodeModulesPath } from 'vscode/vscode/vs/base/common/network';
import 'vscode/vscode/vs/base/common/event';
import 'vscode/vscode/vs/base/common/observableInternal/autorun';
import 'vscode/vscode/vs/base/common/observableInternal/derived';
import 'vscode/vscode/vs/base/common/observableInternal/autorun';
import 'vscode/vscode/vs/base/common/cancellation';
import { observableFromEvent } from 'vscode/vscode/vs/base/common/observableInternal/utils';
import 'vscode/vscode/vs/base/common/cancellation';
import { isWeb } from 'vscode/vscode/vs/base/common/platform';
import { joinPath, isEqualOrParent } from 'vscode/vscode/vs/base/common/resources';
import { isObject } from 'vscode/vscode/vs/base/common/types';
import { StandardTokenType } from 'vscode/vscode/vs/editor/common/encodedTokenAttributes';
import { LazyTokenizationSupport, TokenizationRegistry } from 'vscode/vscode/vs/editor/common/languages';

@@ -28,3 +27,2 @@ import { ILanguageService } from 'vscode/vscode/vs/editor/common/languages/language';

import { INotificationService } from 'vscode/vscode/vs/platform/notification/common/notification.service';
import { ProgressLocation } from 'vscode/vscode/vs/platform/progress/common/progress';
import { IProgressService } from 'vscode/vscode/vs/platform/progress/common/progress.service';

@@ -39,4 +37,2 @@ import { ITelemetryService } from 'vscode/vscode/vs/platform/telemetry/common/telemetry.service';

import { IWorkbenchThemeService } from 'vscode/vscode/vs/workbench/services/themes/common/workbenchThemeService.service';
import * as vscodeTextmate from 'vscode-textmate';
import * as vscodeOniguruma from 'vscode-oniguruma';

@@ -140,9 +136,9 @@ var TextMateTokenizationFeature_1;

case 'string':
tokenTypes[scope] = StandardTokenType.String;
tokenTypes[scope] = 2;
break;
case 'other':
tokenTypes[scope] = StandardTokenType.Other;
tokenTypes[scope] = 0;
break;
case 'comment':
tokenTypes[scope] = StandardTokenType.Comment;
tokenTypes[scope] = 1;
break;
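
Here the StandardTokenType references are replaced by their numeric values; the symbolic names remain visible on the corresponding old lines. For reference, the assumed values of VS Code's StandardTokenType const enum, which match the replacements above:

// Assumed values of VS Code's StandardTokenType const enum (encodedTokenAttributes):
const StandardTokenType = { Other: 0, Comment: 1, String: 2, RegEx: 3 };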

@@ -176,3 +172,3 @@ }

if (this._debugMode) {
this._notificationService.error(( localize(2771, "Already Logging.")));
this._notificationService.error(( localize(3082, "Already Logging.")));
return;
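
From here on, the remaining hunks in this file (and the extension-point files below) mostly renumber the first argument of localize: indices 2771 through 2805 become 3082 through 3116 while the default English strings are unchanged. In the monaco-vscode-api distribution this argument appears to be an index into the bundled NLS message table, so it shifts mechanically whenever the bundle is regenerated for a new release; it does not indicate a behavioural change. The call shape itself stays the same:

// Shape of the transpiled NLS call used throughout these files:
//   localize(messageIndex, defaultEnglishString, ...formatArgs)
const message = localize(3082, "Already Logging.");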

@@ -184,7 +180,7 @@ }

this._progressService.withProgress({
location: ProgressLocation.Notification,
buttons: [( localize(2772, "Stop"))]
location: 15,
buttons: [( localize(3083, "Stop"))]
}, (progress) => {
progress.report({
message: ( localize(2773, "Preparing to log TM Grammar parsing. Press Stop when finished."))
message: ( localize(3084, "Preparing to log TM Grammar parsing. Press Stop when finished."))
});

@@ -194,3 +190,3 @@ return this._getVSCodeOniguruma().then((vscodeOniguruma) => {

progress.report({
message: ( localize(2774, "Now logging TM Grammar parsing. Press Stop when finished."))
message: ( localize(3085, "Now logging TM Grammar parsing. Press Stop when finished."))
});

@@ -218,3 +214,3 @@ return (

}
const vscodeOniguruma = await this._getVSCodeOniguruma();
const [vscodeTextmate, vscodeOniguruma] = await Promise.all([import('vscode-textmate').then(module => module.default ?? module), this._getVSCodeOniguruma()]);
const onigLib = Promise.resolve({

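Previously only vscode-oniguruma was awaited at this point (via _getVSCodeOniguruma()); since the static top-level imports were dropped, 10.0.0 also needs vscode-textmate here and loads both in parallel with Promise.all. The next hunk applies the same idea to WASM initialization, fetching the vscode-oniguruma module and the .wasm bytes concurrently before calling loadWASM. A compact sketch of that pattern (the helper name is illustrative):

// Sketch: fetch the vscode-oniguruma module and its WASM binary concurrently,
// then initialize the engine once both are available.
async function initOniguruma(loadWasmBytes) {
  const [vscodeOniguruma, wasm] = await Promise.all([
    import('vscode-oniguruma').then((m) => m.default ?? m),
    loadWasmBytes() // e.g. fetch(wasmUri).then((r) => r.arrayBuffer())
  ]);
  await vscodeOniguruma.loadWASM({ data: wasm });
  return vscodeOniguruma;
}
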
@@ -314,3 +310,3 @@ createOnigScanner: (sources) => vscodeOniguruma.createOnigScanner(sources),

this._vscodeOniguruma = (async () => {
const wasm = await this._loadVSCodeOnigurumaWASM();
const [vscodeOniguruma, wasm] = await Promise.all([import('vscode-oniguruma').then(module => module.default ?? module), this._loadVSCodeOnigurumaWASM()]);
await vscodeOniguruma.loadWASM({

@@ -405,3 +401,3 @@ data: wasm,

collector.error(( localize(
2775,
3086,
"Unknown language in `contributes.{0}.language`. Provided value: {1}",

@@ -415,3 +411,3 @@ grammarsExtPoint.name,

collector.error(( localize(
2776,
3087,
"Expected string in `contributes.{0}.scopeName`. Provided value: {1}",

@@ -425,3 +421,3 @@ grammarsExtPoint.name,

collector.error(( localize(
2777,
3088,
"Expected string in `contributes.{0}.path`. Provided value: {1}",

@@ -435,3 +431,3 @@ grammarsExtPoint.name,

collector.error(( localize(
2778,
3089,
"Invalid value in `contributes.{0}.injectTo`. Must be an array of language scope names. Provided value: {1}",

@@ -445,3 +441,3 @@ grammarsExtPoint.name,

collector.error(( localize(
2779,
3090,
"Invalid value in `contributes.{0}.embeddedLanguages`. Must be an object map from scope name to language. Provided value: {1}",

@@ -455,3 +451,3 @@ grammarsExtPoint.name,

collector.error(( localize(
2780,
3091,
"Invalid value in `contributes.{0}.tokenTypes`. Must be an object map from scope name to token type. Provided value: {1}",

@@ -466,3 +462,3 @@ grammarsExtPoint.name,

collector.warn(( localize(
2781,
3092,
"Expected `contributes.{0}.path` ({1}) to be included inside extension's folder ({2}). This might make the extension non-portable.",

@@ -469,0 +465,0 @@ grammarsExtPoint.name,
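
The hunks above are the validation of the standard contributes.grammars extension point; only the localize indices change in this release. For context, a contribution that passes these checks looks roughly like this (shown as a plain object for illustration; every value is hypothetical):

// Hypothetical "contributes" fragment of an extension manifest that satisfies the
// checks above (language, scopeName, path, injectTo, embeddedLanguages, tokenTypes).
const contributes = {
  grammars: [{
    language: 'shellscript',
    scopeName: 'source.shell',
    path: './syntaxes/shell.tmLanguage.json', // should live inside the extension's folder
    embeddedLanguages: { 'meta.embedded.inline.python': 'python' },
    tokenTypes: { 'string.quoted.double.shell': 'other' },
    injectTo: [] // array of target scope names, if any
  }]
};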

import { nullTokenizeEncoded } from 'vscode/vscode/vs/editor/common/languages/nullTokenize';
import { Disposable } from 'vscode/vscode/vs/base/common/lifecycle';
import 'vscode/vscode/vs/base/common/arrays';
import 'vscode/vscode/vs/base/common/event';
import 'vscode/vscode/vs/base/common/observableInternal/autorun';
import 'vscode/vscode/vs/base/common/observableInternal/derived';
import 'vscode/vscode/vs/base/common/observableInternal/autorun';
import 'vscode/vscode/vs/base/common/cancellation';
import { keepObserved } from 'vscode/vscode/vs/base/common/observableInternal/utils';
import 'vscode/vscode/vs/base/common/cancellation';

@@ -9,0 +10,0 @@ class TokenizationSupportWithLineLimit extends Disposable {

@@ -5,4 +5,4 @@ import { __decorate, __param } from 'vscode/external/tslib/tslib.es6.js';

import { getTokenClassificationRegistry, typeAndModifierIdPattern } from 'vscode/vscode/vs/platform/theme/common/tokenClassificationRegistry';
import { registerWorkbenchContribution2 } from 'vscode/vscode/vs/workbench/common/contributions';
import { IInstantiationService } from 'vscode/vscode/vs/platform/instantiation/common/instantiation';
import { registerWorkbenchContribution2, WorkbenchPhase } from 'vscode/vscode/vs/workbench/common/contributions';

@@ -13,3 +13,3 @@ const tokenClassificationRegistry = getTokenClassificationRegistry();

jsonSchema: {
description: ( localize(2782, 'Contributes semantic token types.')),
description: ( localize(3093, 'Contributes semantic token types.')),
type: 'array',

@@ -21,15 +21,15 @@ items: {

type: 'string',
description: ( localize(2783, 'The identifier of the semantic token type')),
description: ( localize(3094, 'The identifier of the semantic token type')),
pattern: typeAndModifierIdPattern,
patternErrorMessage: ( localize(2784, 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*')),
patternErrorMessage: ( localize(3095, 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*')),
},
superType: {
type: 'string',
description: ( localize(2785, 'The super type of the semantic token type')),
description: ( localize(3096, 'The super type of the semantic token type')),
pattern: typeAndModifierIdPattern,
patternErrorMessage: ( localize(2786, 'Super types should be in the form letterOrDigit[_-letterOrDigit]*')),
patternErrorMessage: ( localize(3097, 'Super types should be in the form letterOrDigit[_-letterOrDigit]*')),
},
description: {
type: 'string',
description: ( localize(2787, 'The description of the semantic token type')),
description: ( localize(3098, 'The description of the semantic token type')),
}

@@ -43,3 +43,3 @@ }

jsonSchema: {
description: ( localize(2788, 'Contributes semantic token modifiers.')),
description: ( localize(3099, 'Contributes semantic token modifiers.')),
type: 'array',

@@ -51,9 +51,9 @@ items: {

type: 'string',
description: ( localize(2789, 'The identifier of the semantic token modifier')),
description: ( localize(3100, 'The identifier of the semantic token modifier')),
pattern: typeAndModifierIdPattern,
patternErrorMessage: ( localize(2790, 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'))
patternErrorMessage: ( localize(3101, 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'))
},
description: {
type: 'string',
description: ( localize(2791, 'The description of the semantic token modifier'))
description: ( localize(3102, 'The description of the semantic token modifier'))
}

@@ -67,3 +67,3 @@ }

jsonSchema: {
description: ( localize(2792, 'Contributes semantic token scope maps.')),
description: ( localize(3103, 'Contributes semantic token scope maps.')),
type: 'array',

@@ -74,3 +74,3 @@ items: {

language: {
description: ( localize(2793, 'Lists the languge for which the defaults are.')),
description: ( localize(3104, 'Lists the languge for which the defaults are.')),
type: 'string'

@@ -80,3 +80,3 @@ },

description: ( localize(
2794,
3105,
'Maps a semantic token (described by semantic token selector) to one or more textMate scopes used to represent that token.'

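The three schemas above describe the semanticTokenTypes, semanticTokenModifiers and semanticTokenScopes extension points; again only the localize indices change. A contribution matching these schemas looks roughly like this (plain object for illustration; all identifiers are made up):

// Hypothetical "contributes" fragment exercising the three extension points above.
const contributes = {
  semanticTokenTypes: [
    { id: 'templateType', superType: 'type', description: 'A template type parameter.' }
  ],
  semanticTokenModifiers: [
    { id: 'native', description: 'Declared in native code.' }
  ],
  semanticTokenScopes: [
    { language: 'typescript', scopes: { 'templateType.native': ['entity.name.type.ts'] } }
  ]
};
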
@@ -101,3 +101,3 @@ )),

collector.error(( localize(
2795,
3106,
"'configuration.{0}.id' must be defined and can not be empty",

@@ -110,3 +110,3 @@ extensionPoint

collector.error(( localize(
2796,
3107,
"'configuration.{0}.id' must follow the pattern letterOrDigit[-_letterOrDigit]*",

@@ -120,3 +120,3 @@ extensionPoint

collector.error(( localize(
2797,
3108,
"'configuration.{0}.superType' must follow the pattern letterOrDigit[-_letterOrDigit]*",

@@ -129,3 +129,3 @@ extensionPoint

collector.error(( localize(
2798,
3109,
"'configuration.{0}.description' must be defined and can not be empty",

@@ -143,3 +143,3 @@ extensionPoint

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(( localize(2799, "'configuration.semanticTokenType' must be an array")));
collector.error(( localize(3110, "'configuration.semanticTokenType' must be an array")));
return;

@@ -165,3 +165,3 @@ }

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(( localize(2800, "'configuration.semanticTokenModifier' must be an array")));
collector.error(( localize(3111, "'configuration.semanticTokenModifier' must be an array")));
return;

@@ -187,3 +187,3 @@ }

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(( localize(2801, "'configuration.semanticTokenScopes' must be an array")));
collector.error(( localize(3112, "'configuration.semanticTokenScopes' must be an array")));
return;

@@ -193,3 +193,3 @@ }

if (contribution.language && typeof contribution.language !== 'string') {
collector.error(( localize(2802, "'configuration.semanticTokenScopes.language' must be a string")));
collector.error(( localize(3113, "'configuration.semanticTokenScopes.language' must be a string")));
continue;

@@ -199,3 +199,3 @@ }

collector.error(( localize(
2803,
3114,
"'configuration.semanticTokenScopes.scopes' must be defined as an object"

@@ -209,3 +209,3 @@ )));

collector.error(( localize(
2804,
3115,
"'configuration.semanticTokenScopes.scopes' values must be an array of strings"

@@ -221,3 +221,3 @@ )));

collector.error(( localize(
2805,
3116,
"configuration.semanticTokenScopes.scopes': Problems parsing selector {0}.",

@@ -257,4 +257,4 @@ selectorString

], TokenClassificationExtensionPointWorkbenchContribution)));
registerWorkbenchContribution2(TokenClassificationExtensionPointWorkbenchContribution.ID, TokenClassificationExtensionPointWorkbenchContribution, WorkbenchPhase.BlockStartup);
registerWorkbenchContribution2(TokenClassificationExtensionPointWorkbenchContribution.ID, TokenClassificationExtensionPointWorkbenchContribution, 1);
export { TokenClassificationExtensionPoints };
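
One side of this last change spells out WorkbenchPhase.BlockStartup (matching the WorkbenchPhase symbol added to the contributions import near the top of the file) while the other inlines it as 1; both refer to the same phase, since BlockStartup is the first value of VS Code's WorkbenchPhase const enum. A reference sketch of the registration (the phase comment reflects the assumed upstream enum value):

// Sketch: register a workbench contribution that runs before startup completes.
registerWorkbenchContribution2(
  TokenClassificationExtensionPointWorkbenchContribution.ID,
  TokenClassificationExtensionPointWorkbenchContribution,
  1 // WorkbenchPhase.BlockStartup (assumed const-enum value)
);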