Fixes #175125
Committed by Henning Dieterichs
parent 69b125c6b3
commit 72b1ad9f24
@@ -276,8 +276,12 @@ export function wasEventTriggeredRecently(event: Event<any>, timeoutMs: number,
 }
 
 /**
- * This ensures the observable is kept up-to-date.
- * This is useful when the observables `get` method is used.
+ * This ensures the observable cache is kept up-to-date, even if there are no subscribers.
+ * This is useful when the observables `get` method is used, but not its `read` method.
+ *
+ * (Usually, when no one is actually observing the observable, getting its value will
+ * compute it from scratch, as the cache cannot be trusted:
+ * Because no one is actually observing its value, keeping the cache up-to-date would be too expensive)
 */
 export function keepAlive(observable: IObservable<any>): IDisposable {
 	const o = new KeepAliveObserver();
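The doc comment above describes the pattern the rest of this commit relies on: an observable built from an event source only maintains its cache while someone observes it, so keepAlive attaches a permanent observer. A minimal illustrative sketch, not part of this commit; `limitChanged` and `readLimitFromSettings` are made-up placeholders, while the helpers are the ones this diff imports:

import { Emitter } from 'vs/base/common/event';
import { DisposableStore } from 'vs/base/common/lifecycle';
import { keepAlive, observableFromEvent } from 'vs/base/common/observable';

const readLimitFromSettings = (): number => 20_000; // stand-in for an expensive lookup

const limitChanged = new Emitter<void>();
const limit = observableFromEvent(limitChanged.event, () => readLimitFromSettings());

const store = new DisposableStore();
// Without this, limit.get() would recompute via readLimitFromSettings() on every call,
// because no observer keeps the cached value trustworthy.
store.add(keepAlive(limit));

// Hot paths can now call .get() directly instead of .read(reader):
console.log(limit.get());
limitChanged.fire(); // invalidates the cache; the next .get() recomputes once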
@@ -9,6 +9,7 @@ import { Color } from 'vs/base/common/color';
 import { onUnexpectedError } from 'vs/base/common/errors';
 import { Disposable, DisposableStore, IDisposable } from 'vs/base/common/lifecycle';
 import { FileAccess, nodeModulesAsarUnpackedPath, nodeModulesPath } from 'vs/base/common/network';
+import { IObservable, observableFromEvent } from 'vs/base/common/observable';
 import { isWeb } from 'vs/base/common/platform';
 import * as resources from 'vs/base/common/resources';
 import * as types from 'vs/base/common/types';
@@ -270,11 +271,17 @@ export class TextMateTokenizationFeature extends Disposable implements ITextMate
 			if (!r.grammar) {
 				return null;
 			}
+			const maxTokenizationLineLength = observableConfigValue<number>(
+				'editor.maxTokenizationLineLength',
+				languageId,
+				-1,
+				this._configurationService
+			);
 			const tokenization = new TextMateTokenizationSupport(
 				r.grammar,
 				r.initialState,
 				r.containsEmbeddedLanguages,
-				(textModel, tokenStore) => this._workerHost.createBackgroundTokenizer(textModel, tokenStore),
+				(textModel, tokenStore) => this._workerHost.createBackgroundTokenizer(textModel, tokenStore, maxTokenizationLineLength),
 			);
 			tokenization.onDidEncounterLanguage((encodedLanguageId) => {
 				if (!this._encounteredLanguages[encodedLanguageId]) {
@@ -283,7 +290,7 @@ export class TextMateTokenizationFeature extends Disposable implements ITextMate
 					this._languageService.requestBasicLanguageFeatures(languageId);
 				}
 			});
-			return new TokenizationSupportWithLineLimit(languageId, encodedLanguageId, tokenization, this._configurationService);
+			return new TokenizationSupportWithLineLimit(encodedLanguageId, tokenization, maxTokenizationLineLength);
 		} catch (err) {
 			if (err.message && err.message === missingTMGrammarErrorMessage) {
 				// Don't log this error message
@@ -423,3 +430,14 @@ function validateGrammarExtensionPoint(extensionLocation: URI, syntax: ITMSyntax
 	}
 	return true;
 }
+
+function observableConfigValue<T>(key: string, languageId: string, defaultValue: T, configurationService: IConfigurationService): IObservable<T> {
+	return observableFromEvent(
+		(handleChange) => configurationService.onDidChangeConfiguration(e => {
+			if (e.affectsConfiguration(key, { overrideIdentifier: languageId })) {
+				handleChange(e);
+			}
+		}),
+		() => configurationService.getValue<T>(key, { overrideIdentifier: languageId }) ?? defaultValue,
+	);
+}
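For illustration only (not in the commit), the new helper generalizes to any language-scoped setting; the sketch below points it at 'editor.wordWrapColumn' purely as an example, with the same .get()/.read() contract as the 'editor.maxTokenizationLineLength' observable created above:

function observeWordWrapColumn(languageId: string, configurationService: IConfigurationService): IObservable<number> {
	// Same helper as above, aimed at a different language-scoped setting with a default of 80.
	return observableConfigValue<number>('editor.wordWrapColumn', languageId, 80, configurationService);
}

// Consumers call .get() for a synchronous read of the current effective value,
// or .read(reader) inside an autorun/derived to re-run when the setting changes.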
@@ -7,31 +7,18 @@ import { LanguageId } from 'vs/editor/common/encodedTokenAttributes';
 import { EncodedTokenizationResult, IBackgroundTokenizationStore, IBackgroundTokenizer, IState, ITokenizationSupport, TokenizationResult } from 'vs/editor/common/languages';
 import { nullTokenizeEncoded } from 'vs/editor/common/languages/nullTokenize';
 import { ITextModel } from 'vs/editor/common/model';
-import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
 import { Disposable } from 'vs/base/common/lifecycle';
+import { IObservable, keepAlive } from 'vs/base/common/observable';
 
 export class TokenizationSupportWithLineLimit extends Disposable implements ITokenizationSupport {
-	private _maxTokenizationLineLength: number;
-
 	constructor(
-		private readonly _languageId: string,
 		private readonly _encodedLanguageId: LanguageId,
 		private readonly _actual: ITokenizationSupport,
-		@IConfigurationService private readonly _configurationService: IConfigurationService,
+		private readonly _maxTokenizationLineLength: IObservable<number>,
 	) {
 		super();
 
-		this._maxTokenizationLineLength = this._configurationService.getValue<number>('editor.maxTokenizationLineLength', {
-			overrideIdentifier: this._languageId
-		});
-
-		this._register(this._configurationService.onDidChangeConfiguration(e => {
-			if (e.affectsConfiguration('editor.maxTokenizationLineLength')) {
-				this._maxTokenizationLineLength = this._configurationService.getValue<number>('editor.maxTokenizationLineLength', {
-					overrideIdentifier: this._languageId
-				});
-			}
-		}));
+		this._register(keepAlive(this._maxTokenizationLineLength));
 	}
 
 	getInitialState(): IState {
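A hedged usage sketch of the reshaped constructor, mirroring the two call sites later in this diff; `innerSupport` and `encodedLanguageId` are placeholders declared for the example, not values from the commit:

import { observableValue } from 'vs/base/common/observable';

declare const innerSupport: ITokenizationSupport;   // e.g. a TextMateTokenizationSupport
declare const encodedLanguageId: LanguageId;

const maxLen = observableValue('maxTokenizationLineLength', 20_000);
const limited = new TokenizationSupportWithLineLimit(encodedLanguageId, innerSupport, maxLen);

// Lines at or above the current limit get null tokens; updating the observable
// takes effect on the next tokenizeEncoded() call, with no re-registration needed.
maxLen.set(400, undefined);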
@@ -44,7 +31,7 @@ export class TokenizationSupportWithLineLimit extends Disposable implements ITokenizationSupport {
 
 	tokenizeEncoded(line: string, hasEOL: boolean, state: IState): EncodedTokenizationResult {
 		// Do not attempt to tokenize if a line is too long
-		if (line.length >= this._maxTokenizationLineLength) {
+		if (line.length >= this._maxTokenizationLineLength.get()) {
 			return nullTokenizeEncoded(this._encodedLanguageId, state);
 		}
 
@@ -84,7 +84,7 @@ export class TextMateTokenizationWorker {
 	public acceptNewModel(data: IRawModelData): void {
 		const uri = URI.revive(data.uri);
 		const key = uri.toString();
-		this._models[key] = new TextMateWorkerModel(uri, data.lines, data.EOL, data.versionId, this, data.languageId, data.encodedLanguageId);
+		this._models[key] = new TextMateWorkerModel(uri, data.lines, data.EOL, data.versionId, this, data.languageId, data.encodedLanguageId, data.maxTokenizationLineLength);
 	}
 
 	public acceptModelChanged(strURL: string, e: IModelChangedEvent): void {
@@ -111,6 +111,10 @@ export class TextMateTokenizationWorker {
 		grammarFactory?.setTheme(theme, colorMap);
 	}
 
+	public acceptMaxTokenizationLineLength(strURL: string, value: number): void {
+		this._models[strURL].acceptMaxTokenizationLineLength(value);
+	}
+
 	// #endregion
 
 	// #region called by worker model
@@ -140,6 +144,7 @@ export interface IRawModelData {
 	EOL: string;
 	languageId: string;
 	encodedLanguageId: LanguageId;
+	maxTokenizationLineLength: number;
 }
 
 export function create(ctx: IWorkerContext<TextMateWorkerHost>, createData: ICreateData): TextMateTokenizationWorker {
@@ -15,6 +15,8 @@ import { TextMateTokenizationSupport } from 'vs/workbench/services/textMate/brow
 import { StateDeltas } from 'vs/workbench/services/textMate/browser/workerHost/textMateWorkerHost';
 import { RunOnceScheduler } from 'vs/base/common/async';
 import { TextMateTokenizationWorker } from './textMate.worker';
+import { observableValue } from 'vs/base/common/observable';
+import { TokenizationSupportWithLineLimit } from 'vs/workbench/services/textMate/browser/tokenizationSupport/tokenizationSupportWithLineLimit';
 
 export class TextMateWorkerModel extends MirrorTextModel {
 	private _tokenizationStateStore: TokenizationStateStore | null;
@@ -22,14 +24,28 @@ export class TextMateWorkerModel extends MirrorTextModel {
 	private _languageId: string;
 	private _encodedLanguageId: LanguageId;
 	private _isDisposed: boolean;
+	private readonly _maxTokenizationLineLength = observableValue(
+		'_maxTokenizationLineLength',
+		-1
+	);
 
-	constructor(uri: URI, lines: string[], eol: string, versionId: number, worker: TextMateTokenizationWorker, languageId: string, encodedLanguageId: LanguageId) {
+	constructor(
+		uri: URI,
+		lines: string[],
+		eol: string,
+		versionId: number,
+		worker: TextMateTokenizationWorker,
+		languageId: string,
+		encodedLanguageId: LanguageId,
+		maxTokenizationLineLength: number,
+	) {
 		super(uri, lines, eol, versionId);
 		this._tokenizationStateStore = null;
 		this._worker = worker;
 		this._languageId = languageId;
 		this._encodedLanguageId = encodedLanguageId;
 		this._isDisposed = false;
+		this._maxTokenizationLineLength.set(maxTokenizationLineLength, undefined);
 		this._resetTokenization();
 	}
 
@@ -44,7 +60,10 @@ export class TextMateWorkerModel extends MirrorTextModel {
 		this._resetTokenization();
 	}
 
-	private readonly tokenizeDebouncer = new RunOnceScheduler(() => this._tokenize(), 10);
+	private readonly tokenizeDebouncer = new RunOnceScheduler(
+		() => this._tokenize(),
+		10
+	);
 
 	override onEvents(e: IModelChangedEvent): void {
 		super.onEvents(e);
@@ -59,9 +78,19 @@ export class TextMateWorkerModel extends MirrorTextModel {
 		this.tokenizeDebouncer.schedule();
 	}
 
+	public acceptMaxTokenizationLineLength(
+		maxTokenizationLineLength: number
+	): void {
+		this._maxTokenizationLineLength.set(maxTokenizationLineLength, undefined);
+	}
+
 	public retokenize(startLineNumber: number, endLineNumberExclusive: number) {
 		if (this._tokenizationStateStore) {
-			for (let lineNumber = startLineNumber; lineNumber < endLineNumberExclusive; lineNumber++) {
+			for (
+				let lineNumber = startLineNumber;
+				lineNumber < endLineNumberExclusive;
+				lineNumber++
+			) {
 				this._tokenizationStateStore.markMustBeTokenized(lineNumber - 1);
 			}
 			this.tokenizeDebouncer.schedule();
@@ -74,13 +103,25 @@ export class TextMateWorkerModel extends MirrorTextModel {
 		const languageId = this._languageId;
 		const encodedLanguageId = this._encodedLanguageId;
 		this._worker.getOrCreateGrammar(languageId, encodedLanguageId).then((r) => {
-			if (this._isDisposed || languageId !== this._languageId || encodedLanguageId !== this._encodedLanguageId || !r) {
+			if (
+				this._isDisposed ||
+				languageId !== this._languageId ||
+				encodedLanguageId !== this._encodedLanguageId ||
+				!r
+			) {
 				return;
 			}
 
 			if (r.grammar) {
-				const tokenizationSupport = new TextMateTokenizationSupport(r.grammar, r.initialState, false);
-				this._tokenizationStateStore = new TokenizationStateStore(tokenizationSupport, tokenizationSupport.getInitialState());
+				const tokenizationSupport = new TokenizationSupportWithLineLimit(
+					this._encodedLanguageId,
+					new TextMateTokenizationSupport(r.grammar, r.initialState, false),
+					this._maxTokenizationLineLength
+				);
+				this._tokenizationStateStore = new TokenizationStateStore(
+					tokenizationSupport,
+					tokenizationSupport.getInitialState()
+				);
 			} else {
 				this._tokenizationStateStore = null;
 			}
@@ -115,10 +156,26 @@ export class TextMateWorkerModel extends MirrorTextModel {
 
 			const text = this._lines[lineIndex];
 
-			const lineStartState = this._tokenizationStateStore.getBeginState(lineIndex) as StateStack;
-			const tokenizeResult = this._tokenizationStateStore.tokenizationSupport.tokenizeEncoded(text, true, lineStartState);
-			if (this._tokenizationStateStore.setEndState(lineCount, lineIndex, tokenizeResult.endState)) {
-				const delta = diffStateStacksRefEq(lineStartState, tokenizeResult.endState as StateStack);
+			const lineStartState = this._tokenizationStateStore.getBeginState(
+				lineIndex
+			) as StateStack;
+			const tokenizeResult =
+				this._tokenizationStateStore.tokenizationSupport.tokenizeEncoded(
+					text,
+					true,
+					lineStartState
+				);
+			if (
+				this._tokenizationStateStore.setEndState(
+					lineCount,
+					lineIndex,
+					tokenizeResult.endState
+				)
+			) {
+				const delta = diffStateStacksRefEq(
+					lineStartState,
+					tokenizeResult.endState as StateStack
+				);
 				stateDeltaBuilder.setState(lineIndex + 1, delta);
 			}
 
@@ -137,7 +194,12 @@ export class TextMateWorkerModel extends MirrorTextModel {
 			}
 
 			const stateDeltas = stateDeltaBuilder.getStateDeltas();
-			this._worker.setTokensAndStates(this._uri, this._versionId, builder.serialize(), stateDeltas);
+			this._worker.setTokensAndStates(
+				this._uri,
+				this._versionId,
+				builder.serialize(),
+				stateDeltas
+			);
 
 			const deltaMs = new Date().getTime() - startTime;
 			if (deltaMs > 20) {
@@ -6,6 +6,7 @@
 import { BugIndicatingError } from 'vs/base/common/errors';
 import { DisposableStore, IDisposable, toDisposable } from 'vs/base/common/lifecycle';
 import { AppResourcePath, FileAccess, nodeModulesAsarPath, nodeModulesPath } from 'vs/base/common/network';
+import { IObservable } from 'vs/base/common/observable';
 import { isWeb } from 'vs/base/common/platform';
 import { URI, UriComponents } from 'vs/base/common/uri';
 import { createWebWorker, MonacoWebWorker } from 'vs/editor/browser/services/webWorker';
@@ -122,7 +123,7 @@ export class TextMateWorkerHost implements IDisposable {
 	}
 
 	// Will be recreated when worker is killed (because tokenizer is re-registered when languages change)
-	public createBackgroundTokenizer(textModel: ITextModel, tokenStore: IBackgroundTokenizationStore): IBackgroundTokenizer | undefined {
+	public createBackgroundTokenizer(textModel: ITextModel, tokenStore: IBackgroundTokenizationStore, maxTokenizationLineLength: IObservable<number>): IBackgroundTokenizer | undefined {
 		if (this._workerTokenizerControllers.has(textModel.uri.toString())) {
 			throw new BugIndicatingError();
 		}
@@ -144,7 +145,7 @@ export class TextMateWorkerHost implements IDisposable {
 		}
 
 		store.add(keepAliveWhenAttached(textModel, () => {
-			const controller = new TextMateWorkerTokenizerController(textModel, workerProxy, this._languageService.languageIdCodec, tokenStore, INITIAL, this._configurationService);
+			const controller = new TextMateWorkerTokenizerController(textModel, workerProxy, this._languageService.languageIdCodec, tokenStore, INITIAL, this._configurationService, maxTokenizationLineLength);
 			this._workerTokenizerControllers.set(textModel.uri.toString(), controller);
 
 			return toDisposable(() => {
@@ -4,7 +4,7 @@
  *--------------------------------------------------------------------------------------------*/
 
 import { Disposable } from 'vs/base/common/lifecycle';
-import { IObservable, keepAlive, observableFromEvent } from 'vs/base/common/observable';
+import { IObservable, autorun, keepAlive, observableFromEvent } from 'vs/base/common/observable';
 import { countEOL } from 'vs/editor/common/core/eolCounter';
 import { LineRange } from 'vs/editor/common/core/lineRange';
 import { Range } from 'vs/editor/common/core/range';
@@ -37,6 +37,7 @@ export class TextMateWorkerTokenizerController extends Disposable {
 		private readonly _backgroundTokenizationStore: IBackgroundTokenizationStore,
 		private readonly _initialState: StateStack,
 		private readonly _configurationService: IConfigurationService,
+		private readonly _maxTokenizationLineLength: IObservable<number>,
 	) {
 		super();
 
@@ -73,7 +74,13 @@ export class TextMateWorkerTokenizerController extends Disposable {
 			EOL: this._model.getEOL(),
 			languageId,
 			encodedLanguageId,
+			maxTokenizationLineLength: this._maxTokenizationLineLength.get(),
 		});
+
+		this._register(autorun('update maxTokenizationLineLength', reader => {
+			const maxTokenizationLineLength = this._maxTokenizationLineLength.read(reader);
+			this._worker.acceptMaxTokenizationLineLength(this._model.uri.toString(), maxTokenizationLineLength);
+		}));
 	}
 
 	get shouldLog() {
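Taken together, the setting now flows end to end as an observable: observableConfigValue reads 'editor.maxTokenizationLineLength' per language on the renderer side, TextMateWorkerHost hands that observable to both the local TokenizationSupportWithLineLimit and the TextMateWorkerTokenizerController, the autorun above forwards every change to the worker, and the worker-side TextMateWorkerModel stores it in an observableValue that its own TokenizationSupportWithLineLimit reads with .get(). A generic sketch of that mirroring step (names here are placeholders, not this commit's API):

import { autorun, IObservable, observableValue } from 'vs/base/common/observable';

// Renderer side: forward every change of an observable across a worker boundary.
function mirrorToWorker(value: IObservable<number>, send: (value: number) => void) {
	return autorun('mirror value to worker', reader => {
		send(value.read(reader)); // read() subscribes, so this re-runs on every change
	});
}

// Worker side: keep the last received value in an observableValue for synchronous .get() reads.
const received = observableValue('mirroredValue', -1);
function acceptValue(value: number): void {
	received.set(value, undefined);
}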