Remove areas from the semantic tokens API

This commit is contained in:
Alex Dima
2019-12-02 10:31:54 +01:00
parent 9995919f55
commit d6dae16f40
12 changed files with 544 additions and 986 deletions

View File

@@ -351,9 +351,9 @@ export function createApiFactoryAndRegisterActors(accessor: ServicesAccessor): I
registerOnTypeFormattingEditProvider(selector: vscode.DocumentSelector, provider: vscode.OnTypeFormattingEditProvider, firstTriggerCharacter: string, ...moreTriggerCharacters: string[]): vscode.Disposable {
return extHostLanguageFeatures.registerOnTypeFormattingEditProvider(extension, checkSelector(selector), provider, [firstTriggerCharacter].concat(moreTriggerCharacters));
},
registerSemanticColoringProvider(selector: vscode.DocumentSelector, provider: vscode.SemanticColoringProvider, legend: vscode.SemanticColoringLegend): vscode.Disposable {
registerSemanticTokensProvider(selector: vscode.DocumentSelector, provider: vscode.SemanticTokensProvider, legend: vscode.SemanticTokensLegend): vscode.Disposable {
checkProposedApiEnabled(extension);
return extHostLanguageFeatures.registerSemanticColoringProvider(extension, checkSelector(selector), provider, legend);
return extHostLanguageFeatures.registerSemanticTokensProvider(extension, checkSelector(selector), provider, legend);
},
registerSignatureHelpProvider(selector: vscode.DocumentSelector, provider: vscode.SignatureHelpProvider, firstItem?: string | vscode.SignatureHelpProviderMetadata, ...remaining: string[]): vscode.Disposable {
if (typeof firstItem === 'object') {
@@ -893,9 +893,11 @@ export function createApiFactoryAndRegisterActors(accessor: ServicesAccessor): I
RelativePattern: extHostTypes.RelativePattern,
ResolvedAuthority: extHostTypes.ResolvedAuthority,
RemoteAuthorityResolverError: extHostTypes.RemoteAuthorityResolverError,
SemanticColoring: extHostTypes.SemanticColoring,
SemanticColoringArea: extHostTypes.SemanticColoringArea,
SemanticColoringLegend: extHostTypes.SemanticColoringLegend,
SemanticTokensLegend: extHostTypes.SemanticTokensLegend,
SemanticTokensBuilder: extHostTypes.SemanticTokensBuilder,
SemanticTokens: extHostTypes.SemanticTokens,
SemanticTokensEdits: extHostTypes.SemanticTokensEdits,
SemanticTokensEdit: extHostTypes.SemanticTokensEdit,
Selection: extHostTypes.Selection,
SelectionRange: extHostTypes.SelectionRange,
ShellExecution: extHostTypes.ShellExecution,

View File

@@ -354,7 +354,7 @@ export interface MainThreadLanguageFeaturesShape extends IDisposable {
$registerOnTypeFormattingSupport(handle: number, selector: IDocumentFilterDto[], autoFormatTriggerCharacters: string[], extensionId: ExtensionIdentifier): void;
$registerNavigateTypeSupport(handle: number): void;
$registerRenameSupport(handle: number, selector: IDocumentFilterDto[], supportsResolveInitialValues: boolean): void;
$registerSemanticColoringProvider(handle: number, selector: IDocumentFilterDto[], legend: modes.SemanticColoringLegend): void;
$registerSemanticTokensProvider(handle: number, selector: IDocumentFilterDto[], legend: modes.SemanticTokensLegend): void;
$registerSuggestSupport(handle: number, selector: IDocumentFilterDto[], triggerCharacters: string[], supportsResolveDetails: boolean, extensionId: ExtensionIdentifier): void;
$registerSignatureHelpProvider(handle: number, selector: IDocumentFilterDto[], metadata: ISignatureHelpProviderMetadataDto): void;
$registerDocumentLinkProvider(handle: number, selector: IDocumentFilterDto[], supportsResolve: boolean): void;
@@ -1167,8 +1167,8 @@ export interface ExtHostLanguageFeaturesShape {
$releaseWorkspaceSymbols(handle: number, id: number): void;
$provideRenameEdits(handle: number, resource: UriComponents, position: IPosition, newName: string, token: CancellationToken): Promise<IWorkspaceEditDto | undefined>;
$resolveRenameLocation(handle: number, resource: UriComponents, position: IPosition, token: CancellationToken): Promise<modes.RenameLocation | undefined>;
$provideSemanticColoring(handle: number, resource: UriComponents, previousSemanticColoringResultId: number, token: CancellationToken): Promise<VSBuffer | null>;
$releaseSemanticColoring(handle: number, semanticColoringResultId: number): void;
$provideSemanticTokens(handle: number, resource: UriComponents, ranges: IRange[] | null, previousResultId: number, token: CancellationToken): Promise<VSBuffer | null>;
$releaseSemanticTokens(handle: number, semanticColoringResultId: number): void;
$provideCompletionItems(handle: number, resource: UriComponents, position: IPosition, context: modes.CompletionContext, token: CancellationToken): Promise<ISuggestResultDto | undefined>;
$resolveCompletionItem(handle: number, resource: UriComponents, position: IPosition, id: ChainedCacheId, token: CancellationToken): Promise<ISuggestDataDto | undefined>;
$releaseCompletionItems(handle: number, id: number): void;

View File

@@ -7,7 +7,7 @@ import { URI, UriComponents } from 'vs/base/common/uri';
import { mixin } from 'vs/base/common/objects';
import * as vscode from 'vscode';
import * as typeConvert from 'vs/workbench/api/common/extHostTypeConverters';
import { Range, Disposable, CompletionList, SnippetString, CodeActionKind, SymbolInformation, DocumentSymbol, SemanticColoringArea } from 'vs/workbench/api/common/extHostTypes';
import { Range, Disposable, CompletionList, SnippetString, CodeActionKind, SymbolInformation, DocumentSymbol, SemanticTokensEdits } from 'vs/workbench/api/common/extHostTypes';
import { ISingleEditOperation } from 'vs/editor/common/model';
import * as modes from 'vs/editor/common/modes';
import { ExtHostDocuments } from 'vs/workbench/api/common/extHostDocuments';
@@ -27,7 +27,7 @@ import { ExtensionIdentifier, IExtensionDescription } from 'vs/platform/extensio
import { IURITransformer } from 'vs/base/common/uriIpc';
import { DisposableStore, dispose } from 'vs/base/common/lifecycle';
import { VSBuffer } from 'vs/base/common/buffer';
import { encodeSemanticTokensDto, ISemanticTokensDto, ISemanticTokensAreaDto } from 'vs/workbench/api/common/shared/semanticTokens';
import { encodeSemanticTokensDto } from 'vs/workbench/api/common/shared/semanticTokens';
import { IdGenerator } from 'vs/base/common/idGenerator';
// --- adapter
@@ -616,62 +616,40 @@ class RenameAdapter {
}
}
export const enum SemanticColoringConstants {
/**
* Let's aim at having 8KB buffers if possible...
* So that would be 8192 / (5 * 4) = 409.6 tokens per area
*/
DesiredTokensPerArea = 400,
/**
* Try to keep the total number of areas under 1024 if possible,
* simply compensate by having more tokens per area...
*/
DesiredMaxAreas = 1024,
/**
* Threshold for merging multiple delta areas and sending a full area.
*/
MinTokensPerArea = 50
class SemanticTokensPreviousResult {
constructor(
public readonly resultId: string | undefined,
public readonly tokens?: Uint32Array,
) { }
}
interface ISemanticColoringAreaPair {
data: Uint32Array;
dto: ISemanticTokensAreaDto;
}
export class SemanticTokensAdapter {
export class SemanticColoringAdapter {
private readonly _previousResults: Map<number, Uint32Array[]>;
private readonly _splitSingleAreaTokenCountThreshold: number;
private readonly _previousResults: Map<number, SemanticTokensPreviousResult>;
private _nextResultId = 1;
constructor(
private readonly _documents: ExtHostDocuments,
private readonly _provider: vscode.SemanticColoringProvider,
private readonly _desiredTokensPerArea = SemanticColoringConstants.DesiredTokensPerArea,
private readonly _desiredMaxAreas = SemanticColoringConstants.DesiredMaxAreas,
private readonly _minTokensPerArea = SemanticColoringConstants.MinTokensPerArea
private readonly _provider: vscode.SemanticTokensProvider,
) {
this._previousResults = new Map<number, Uint32Array[]>();
this._splitSingleAreaTokenCountThreshold = Math.round(1.5 * this._desiredTokensPerArea);
this._previousResults = new Map<number, SemanticTokensPreviousResult>();
}
provideSemanticColoring(resource: URI, previousSemanticColoringResultId: number, token: CancellationToken): Promise<VSBuffer | null> {
provideSemanticTokens(resource: URI, ranges: IRange[] | null, previousResultId: number, token: CancellationToken): Promise<VSBuffer | null> {
const doc = this._documents.getDocument(resource);
return asPromise(() => this._provider.provideSemanticColoring(doc, token)).then(value => {
const previousResult = (previousResultId !== 0 ? this._previousResults.get(previousResultId) : null);
const opts: vscode.SemanticTokensRequestOptions = {
ranges: (Array.isArray(ranges) && ranges.length > 0 ? ranges.map<Range>(typeConvert.Range.to) : undefined),
previousResultId: (previousResult ? previousResult.resultId : undefined)
};
return asPromise(() => this._provider.provideSemanticTokens(doc, opts, token)).then(value => {
if (!value) {
return null;
}
const oldAreas = (previousSemanticColoringResultId !== 0 ? this._previousResults.get(previousSemanticColoringResultId) : null);
if (oldAreas) {
this._previousResults.delete(previousSemanticColoringResultId);
return this._deltaEncodeAreas(oldAreas, value.areas);
if (previousResult) {
this._previousResults.delete(previousResultId);
}
return this._fullEncodeAreas(value.areas);
return this._send(SemanticTokensAdapter._convertToEdits(previousResult, value), value);
});
}
@@ -679,298 +657,77 @@ export class SemanticColoringAdapter {
this._previousResults.delete(semanticColoringResultId);
}
private _deltaEncodeAreas(oldAreas: Uint32Array[], newAreas: SemanticColoringArea[]): VSBuffer {
if (newAreas.length > 1) {
// this is a fancy provider which is smart enough to break things into good areas
// we therefore try to match old areas only by object identity
const oldAreasIndexMap = new Map<Uint32Array, number>();
for (let i = 0, len = oldAreas.length; i < len; i++) {
oldAreasIndexMap.set(oldAreas[i], i);
}
let result: ISemanticColoringAreaPair[] = [];
for (let i = 0, len = newAreas.length; i < len; i++) {
const newArea = newAreas[i];
if (oldAreasIndexMap.has(newArea.data)) {
// great! we can reuse this area
const oldIndex = oldAreasIndexMap.get(newArea.data)!;
result.push({
data: newArea.data,
dto: {
type: 'delta',
line: newArea.line,
oldIndex: oldIndex
}
});
} else {
result.push({
data: newArea.data,
dto: {
type: 'full',
line: newArea.line,
data: newArea.data
}
});
}
}
return this._saveResultAndEncode(result);
}
return this._deltaEncodeArea(oldAreas, newAreas[0]);
private static _isSemanticTokens(v: vscode.SemanticTokens | vscode.SemanticTokensEdits): v is vscode.SemanticTokens {
return v && !!((v as vscode.SemanticTokens).data);
}
private static _oldAreaAppearsInNewArea(oldAreaData: Uint32Array, oldAreaTokenCount: number, newAreaData: Uint32Array, newAreaOffset: number): boolean {
const newTokenStartDeltaLine = newAreaData[5 * newAreaOffset];
// check that each and every value from `oldArea` is equal to `area`
for (let j = 0; j < oldAreaTokenCount; j++) {
const oldOffset = 5 * j;
const newOffset = 5 * (j + newAreaOffset);
if (
(oldAreaData[oldOffset] !== newAreaData[newOffset] - newTokenStartDeltaLine)
|| (oldAreaData[oldOffset + 1] !== newAreaData[newOffset + 1])
|| (oldAreaData[oldOffset + 2] !== newAreaData[newOffset + 2])
|| (oldAreaData[oldOffset + 3] !== newAreaData[newOffset + 3])
|| (oldAreaData[oldOffset + 4] !== newAreaData[newOffset + 4])
) {
return false;
}
}
return true;
private static _isSemanticTokensEdits(v: vscode.SemanticTokens | vscode.SemanticTokensEdits): v is vscode.SemanticTokensEdits {
return v && Array.isArray((v as vscode.SemanticTokensEdits).edits);
}
private _deltaEncodeArea(oldAreas: Uint32Array[], newArea: SemanticColoringArea): VSBuffer {
const newAreaData = newArea.data;
const prependAreas: ISemanticColoringAreaPair[] = [];
const appendAreas: ISemanticColoringAreaPair[] = [];
private static _convertToEdits(previousResult: SemanticTokensPreviousResult | null | undefined, newResult: vscode.SemanticTokens | vscode.SemanticTokensEdits): vscode.SemanticTokens | vscode.SemanticTokensEdits {
if (!SemanticTokensAdapter._isSemanticTokens(newResult)) {
return newResult;
}
if (!previousResult || !previousResult.tokens) {
return newResult;
}
const oldData = previousResult.tokens;
const oldLength = oldData.length;
const newData = newResult.data;
const newLength = newData.length;
// Try to find appearances of `oldAreas` inside `area`.
let newTokenStartIndex = 0;
let newTokenEndIndex = (newAreaData.length / 5) | 0;
let oldAreaUsedIndex = -1;
for (let i = 0, len = oldAreas.length; i < len; i++) {
const oldAreaData = oldAreas[i];
const oldAreaTokenCount = (oldAreaData.length / 5) | 0;
if (oldAreaTokenCount === 0) {
// skip old empty areas
continue;
}
if (newTokenEndIndex - newTokenStartIndex < oldAreaTokenCount) {
// there are too many old tokens, this cannot work
break;
}
let commonPrefixLength = 0;
const maxCommonPrefixLength = Math.min(oldLength, newLength);
while (commonPrefixLength < maxCommonPrefixLength && oldData[commonPrefixLength] === newData[commonPrefixLength]) {
commonPrefixLength++;
}
const newAreaOffset = newTokenStartIndex;
const newTokenStartDeltaLine = newAreaData[5 * newAreaOffset];
const isEqual = SemanticColoringAdapter._oldAreaAppearsInNewArea(oldAreaData, oldAreaTokenCount, newAreaData, newAreaOffset);
if (!isEqual) {
break;
}
newTokenStartIndex += oldAreaTokenCount;
if (commonPrefixLength === oldLength && commonPrefixLength === newLength) {
// complete overlap!
return new SemanticTokensEdits([], newResult.resultId);
}
oldAreaUsedIndex = i;
prependAreas.push({
data: oldAreaData,
dto: {
type: 'delta',
line: newArea.line + newTokenStartDeltaLine,
oldIndex: i
}
let commonSuffixLength = 0;
const maxCommonSuffixLength = maxCommonPrefixLength - commonPrefixLength;
while (commonSuffixLength < maxCommonSuffixLength && oldData[oldLength - commonSuffixLength - 1] === newData[newLength - commonSuffixLength - 1]) {
commonSuffixLength++;
}
return new SemanticTokensEdits([{
start: commonPrefixLength,
deleteCount: (oldLength - commonPrefixLength - commonSuffixLength),
data: newData.subarray(commonPrefixLength, newLength - commonSuffixLength)
}], newResult.resultId);
}
private _send(value: vscode.SemanticTokens | vscode.SemanticTokensEdits, original: vscode.SemanticTokens | vscode.SemanticTokensEdits): VSBuffer | null {
if (SemanticTokensAdapter._isSemanticTokens(value)) {
const myId = this._nextResultId++;
this._previousResults.set(myId, new SemanticTokensPreviousResult(value.resultId, value.data));
return encodeSemanticTokensDto({
id: myId,
type: 'full',
data: value.data
});
}
for (let i = oldAreas.length - 1; i > oldAreaUsedIndex; i--) {
const oldAreaData = oldAreas[i];
const oldAreaTokenCount = (oldAreaData.length / 5) | 0;
if (oldAreaTokenCount === 0) {
// skip old empty areas
continue;
if (SemanticTokensAdapter._isSemanticTokensEdits(value)) {
const myId = this._nextResultId++;
if (SemanticTokensAdapter._isSemanticTokens(original)) {
// store the original
this._previousResults.set(myId, new SemanticTokensPreviousResult(original.resultId, original.data));
} else {
this._previousResults.set(myId, new SemanticTokensPreviousResult(value.resultId));
}
if (newTokenEndIndex - newTokenStartIndex < oldAreaTokenCount) {
// there are too many old tokens, this cannot work
break;
}
const newAreaOffset = (newTokenEndIndex - oldAreaTokenCount);
const newTokenStartDeltaLine = newAreaData[5 * newAreaOffset];
const isEqual = SemanticColoringAdapter._oldAreaAppearsInNewArea(oldAreaData, oldAreaTokenCount, newAreaData, newAreaOffset);
if (!isEqual) {
break;
}
newTokenEndIndex -= oldAreaTokenCount;
appendAreas.unshift({
data: oldAreaData,
dto: {
type: 'delta',
line: newArea.line + newTokenStartDeltaLine,
oldIndex: i
}
return encodeSemanticTokensDto({
id: myId,
type: 'delta',
deltas: (value.edits || []).map(edit => ({ start: edit.start, deleteCount: edit.deleteCount, data: edit.data }))
});
}
if (prependAreas.length === 0 && appendAreas.length === 0) {
// There is no reuse possibility!
return this._fullEncodeAreas([newArea]);
}
if (newTokenStartIndex === newTokenEndIndex) {
// 100% reuse!
return this._saveResultAndEncode(prependAreas.concat(appendAreas));
}
// It is clear at this point that there will be at least one full area.
// Expand the mid area if the areas next to it are too small
while (prependAreas.length > 0) {
const tokenCount = (prependAreas[prependAreas.length - 1].data.length / 5);
if (tokenCount < this._minTokensPerArea) {
newTokenStartIndex -= tokenCount;
prependAreas.pop();
} else {
break;
}
}
while (appendAreas.length > 0) {
const tokenCount = (appendAreas[0].data.length / 5);
if (tokenCount < this._minTokensPerArea) {
newTokenEndIndex += tokenCount;
appendAreas.shift();
} else {
break;
}
}
// Extract the mid area
const newTokenStartDeltaLine = newAreaData[5 * newTokenStartIndex];
const newMidAreaData = new Uint32Array(5 * (newTokenEndIndex - newTokenStartIndex));
for (let tokenIndex = newTokenStartIndex; tokenIndex < newTokenEndIndex; tokenIndex++) {
const srcOffset = 5 * tokenIndex;
const deltaLine = newAreaData[srcOffset];
const startCharacter = newAreaData[srcOffset + 1];
const endCharacter = newAreaData[srcOffset + 2];
const tokenType = newAreaData[srcOffset + 3];
const tokenModifiers = newAreaData[srcOffset + 4];
const destOffset = 5 * (tokenIndex - newTokenStartIndex);
newMidAreaData[destOffset] = deltaLine - newTokenStartDeltaLine;
newMidAreaData[destOffset + 1] = startCharacter;
newMidAreaData[destOffset + 2] = endCharacter;
newMidAreaData[destOffset + 3] = tokenType;
newMidAreaData[destOffset + 4] = tokenModifiers;
}
const newMidArea = new SemanticColoringArea(newArea.line + newTokenStartDeltaLine, newMidAreaData);
const newMidAreas = this._splitAreaIntoMultipleAreasIfNecessary(newMidArea);
const newMidAreasPairs: ISemanticColoringAreaPair[] = newMidAreas.map(a => {
return {
data: a.data,
dto: {
type: 'full',
line: a.line,
data: a.data,
}
};
});
return this._saveResultAndEncode(prependAreas.concat(newMidAreasPairs).concat(appendAreas));
}
private _fullEncodeAreas(areas: SemanticColoringArea[]): VSBuffer {
if (areas.length === 1) {
areas = this._splitAreaIntoMultipleAreasIfNecessary(areas[0]);
}
return this._saveResultAndEncode(areas.map(a => {
return {
data: a.data,
dto: {
type: 'full',
line: a.line,
data: a.data
}
};
}));
}
private _saveResultAndEncode(areas: ISemanticColoringAreaPair[]): VSBuffer {
const myId = this._nextResultId++;
this._previousResults.set(myId, areas.map(a => a.data));
console.log(`_saveResultAndEncode: ${myId} --> ${areas.map(a => `${a.dto.line}-${a.dto.type}(${a.data.length / 5})`).join(', ')}`);
const dto: ISemanticTokensDto = {
id: myId,
areas: areas.map(a => a.dto)
};
return encodeSemanticTokensDto(dto);
}
private _splitAreaIntoMultipleAreasIfNecessary(area: vscode.SemanticColoringArea): SemanticColoringArea[] {
const srcAreaLine = area.line;
const srcAreaData = area.data;
const tokenCount = (srcAreaData.length / 5) | 0;
if (tokenCount <= this._splitSingleAreaTokenCountThreshold) {
return [area];
}
const tokensPerArea = Math.max(Math.ceil(tokenCount / this._desiredMaxAreas), this._desiredTokensPerArea);
let result: SemanticColoringArea[] = [];
let tokenIndex = 0;
while (tokenIndex < tokenCount) {
const tokenStartIndex = tokenIndex;
let tokenEndIndex = Math.min(tokenStartIndex + tokensPerArea, tokenCount);
// Keep tokens on the same line in the same area...
if (tokenEndIndex < tokenCount) {
let smallAvoidDeltaLine = srcAreaData[5 * tokenEndIndex];
let smallTokenEndIndex = tokenEndIndex;
while (smallTokenEndIndex - 1 > tokenStartIndex && srcAreaData[5 * (smallTokenEndIndex - 1)] === smallAvoidDeltaLine) {
smallTokenEndIndex--;
}
if (smallTokenEndIndex - 1 === tokenStartIndex) {
// there are so many tokens on this line that our area would be empty, we must now go right
let bigAvoidDeltaLine = srcAreaData[5 * (tokenEndIndex - 1)];
let bigTokenEndIndex = tokenEndIndex;
while (bigTokenEndIndex + 1 < tokenCount && srcAreaData[5 * (bigTokenEndIndex + 1)] === bigAvoidDeltaLine) {
bigTokenEndIndex++;
}
tokenEndIndex = bigTokenEndIndex;
} else {
tokenEndIndex = smallTokenEndIndex;
}
}
let destAreaLine = 0;
const destAreaData = new Uint32Array((tokenEndIndex - tokenStartIndex) * 5);
while (tokenIndex < tokenEndIndex) {
const srcOffset = 5 * tokenIndex;
const line = srcAreaLine + srcAreaData[srcOffset];
const startCharacter = srcAreaData[srcOffset + 1];
const endCharacter = srcAreaData[srcOffset + 2];
const tokenType = srcAreaData[srcOffset + 3];
const tokenModifiers = srcAreaData[srcOffset + 4];
if (tokenIndex === tokenStartIndex) {
destAreaLine = line;
}
const destOffset = 5 * (tokenIndex - tokenStartIndex);
destAreaData[destOffset] = line - destAreaLine;
destAreaData[destOffset + 1] = startCharacter;
destAreaData[destOffset + 2] = endCharacter;
destAreaData[destOffset + 3] = tokenType;
destAreaData[destOffset + 4] = tokenModifiers;
tokenIndex++;
}
result.push(new SemanticColoringArea(destAreaLine, destAreaData));
}
return result;
return null;
}
}
@@ -1481,7 +1238,7 @@ class CallHierarchyAdapter {
type Adapter = DocumentSymbolAdapter | CodeLensAdapter | DefinitionAdapter | HoverAdapter
| DocumentHighlightAdapter | ReferenceAdapter | CodeActionAdapter | DocumentFormattingAdapter
| RangeFormattingAdapter | OnTypeFormattingAdapter | NavigateTypeAdapter | RenameAdapter
| SemanticColoringAdapter | SuggestAdapter | SignatureHelpAdapter | LinkProviderAdapter
| SemanticTokensAdapter | SuggestAdapter | SignatureHelpAdapter | LinkProviderAdapter
| ImplementationAdapter | TypeDefinitionAdapter | ColorProviderAdapter | FoldingProviderAdapter
| DeclarationAdapter | SelectionRangeAdapter | CallHierarchyAdapter;
@@ -1809,18 +1566,18 @@ export class ExtHostLanguageFeatures implements extHostProtocol.ExtHostLanguageF
//#region semantic coloring
registerSemanticColoringProvider(extension: IExtensionDescription, selector: vscode.DocumentSelector, provider: vscode.SemanticColoringProvider, legend: vscode.SemanticColoringLegend): vscode.Disposable {
const handle = this._addNewAdapter(new SemanticColoringAdapter(this._documents, provider), extension);
this._proxy.$registerSemanticColoringProvider(handle, this._transformDocumentSelector(selector), legend);
registerSemanticTokensProvider(extension: IExtensionDescription, selector: vscode.DocumentSelector, provider: vscode.SemanticTokensProvider, legend: vscode.SemanticTokensLegend): vscode.Disposable {
const handle = this._addNewAdapter(new SemanticTokensAdapter(this._documents, provider), extension);
this._proxy.$registerSemanticTokensProvider(handle, this._transformDocumentSelector(selector), legend);
return this._createDisposable(handle);
}
$provideSemanticColoring(handle: number, resource: UriComponents, previousSemanticColoringResultId: number, token: CancellationToken): Promise<VSBuffer | null> {
return this._withAdapter(handle, SemanticColoringAdapter, adapter => adapter.provideSemanticColoring(URI.revive(resource), previousSemanticColoringResultId, token), null);
$provideSemanticTokens(handle: number, resource: UriComponents, ranges: IRange[] | null, previousResultId: number, token: CancellationToken): Promise<VSBuffer | null> {
return this._withAdapter(handle, SemanticTokensAdapter, adapter => adapter.provideSemanticTokens(URI.revive(resource), ranges, previousResultId, token), null);
}
$releaseSemanticColoring(handle: number, semanticColoringResultId: number): void {
this._withAdapter(handle, SemanticColoringAdapter, adapter => adapter.releaseSemanticColoring(semanticColoringResultId), undefined);
$releaseSemanticTokens(handle: number, semanticColoringResultId: number): void {
this._withAdapter(handle, SemanticTokensAdapter, adapter => adapter.releaseSemanticColoring(semanticColoringResultId), undefined);
}
//#endregion

View File

@@ -2368,7 +2368,7 @@ export enum CommentMode {
//#region Semantic Coloring
export class SemanticColoringLegend {
export class SemanticTokensLegend {
public readonly tokenTypes: string[];
public readonly tokenModifiers: string[];
@@ -2378,21 +2378,74 @@ export class SemanticColoringLegend {
}
}
export class SemanticColoringArea {
public readonly line: number;
public readonly data: Uint32Array;
export class SemanticTokensBuilder {
constructor(line: number, data: Uint32Array) {
this.line = line;
private _prevLine: number;
private _prevChar: number;
private _data: number[];
private _dataLen: number;
constructor() {
this._prevLine = 0;
this._prevChar = 0;
this._data = [];
this._dataLen = 0;
}
public push(line: number, char: number, length: number, tokenType: number, tokenModifiers: number): void {
let pushLine = line;
let pushChar = char;
if (this._dataLen > 0) {
pushLine -= this._prevLine;
if (pushLine === 0) {
pushChar -= this._prevChar;
}
}
this._data[this._dataLen++] = pushLine;
this._data[this._dataLen++] = pushChar;
this._data[this._dataLen++] = length;
this._data[this._dataLen++] = tokenType;
this._data[this._dataLen++] = tokenModifiers;
this._prevLine = line;
this._prevChar = char;
}
public build(): Uint32Array {
return new Uint32Array(this._data);
}
}
export class SemanticTokens {
readonly resultId?: string;
readonly data: Uint32Array;
constructor(data: Uint32Array, resultId?: string) {
this.resultId = resultId;
this.data = data;
}
}
export class SemanticColoring {
public readonly areas: SemanticColoringArea[];
export class SemanticTokensEdit {
readonly start: number;
readonly deleteCount: number;
readonly data?: Uint32Array;
constructor(areas: SemanticColoringArea[]) {
this.areas = areas;
constructor(start: number, deleteCount: number, data?: Uint32Array) {
this.start = start;
this.deleteCount = deleteCount;
this.data = data;
}
}
export class SemanticTokensEdits {
readonly resultId?: string;
readonly edits: SemanticTokensEdit[];
constructor(edits: SemanticTokensEdit[], resultId?: string) {
this.resultId = resultId;
this.edits = edits;
}
}

View File

@@ -5,47 +5,71 @@
import { VSBuffer } from 'vs/base/common/buffer';
export interface ISemanticTokensFullAreaDto {
export interface IFullSemanticTokensDto {
id: number;
type: 'full';
line: number;
data: Uint32Array;
}
export interface ISemanticTokensDeltaAreaDto {
type: 'delta';
line: number;
oldIndex: number;
}
export type ISemanticTokensAreaDto = ISemanticTokensFullAreaDto | ISemanticTokensDeltaAreaDto;
export interface ISemanticTokensDto {
export interface IDeltaSemanticTokensDto {
id: number;
areas: ISemanticTokensAreaDto[];
type: 'delta';
deltas: { start: number; deleteCount: number; data?: Uint32Array; }[];
}
const enum EncodedSemanticTokensAreaType {
export type ISemanticTokensDto = IFullSemanticTokensDto | IDeltaSemanticTokensDto;
const enum EncodedSemanticTokensType {
Full = 1,
Delta = 2
}
export function encodeSemanticTokensDto(semanticTokens: ISemanticTokensDto): VSBuffer {
const buff = VSBuffer.alloc(encodedSize(semanticTokens));
const buff = VSBuffer.alloc(encodedSize2(semanticTokens));
let offset = 0;
buff.writeUInt32BE(semanticTokens.id, offset); offset += 4;
buff.writeUInt32BE(semanticTokens.areas.length, offset); offset += 4;
for (let i = 0; i < semanticTokens.areas.length; i++) {
offset = encodeArea(semanticTokens.areas[i], buff, offset);
if (semanticTokens.type === 'full') {
buff.writeUInt8(EncodedSemanticTokensType.Full, offset); offset += 1;
buff.writeUInt32BE(semanticTokens.data.length, offset); offset += 4;
for (const uint of semanticTokens.data) {
buff.writeUInt32BE(uint, offset); offset += 4;
}
} else {
buff.writeUInt8(EncodedSemanticTokensType.Delta, offset); offset += 1;
buff.writeUInt32BE(semanticTokens.deltas.length, offset); offset += 4;
for (const delta of semanticTokens.deltas) {
buff.writeUInt32BE(delta.start, offset); offset += 4;
buff.writeUInt32BE(delta.deleteCount, offset); offset += 4;
if (delta.data) {
buff.writeUInt32BE(delta.data.length, offset); offset += 4;
for (const uint of delta.data) {
buff.writeUInt32BE(uint, offset); offset += 4;
}
} else {
buff.writeUInt32BE(0, offset); offset += 4;
}
}
}
return buff;
}
function encodedSize(semanticTokens: ISemanticTokensDto): number {
function encodedSize2(semanticTokens: ISemanticTokensDto): number {
let result = 0;
result += 4; // etag
result += 4; // area count
for (let i = 0; i < semanticTokens.areas.length; i++) {
result += encodedAreaSize(semanticTokens.areas[i]);
result += 4; // id
result += 1; // type
if (semanticTokens.type === 'full') {
result += 4; // data length
result += semanticTokens.data.byteLength;
} else {
result += 4; // delta count
for (const delta of semanticTokens.deltas) {
result += 4; // start
result += 4; // deleteCount
result += 4; // data length
if (delta.data) {
result += delta.data.byteLength;
}
}
}
return result;
}
@@ -53,84 +77,37 @@ function encodedSize(semanticTokens: ISemanticTokensDto): number {
export function decodeSemanticTokensDto(buff: VSBuffer): ISemanticTokensDto {
let offset = 0;
const id = buff.readUInt32BE(offset); offset += 4;
const areasCount = buff.readUInt32BE(offset); offset += 4;
let areas: ISemanticTokensAreaDto[] = [];
for (let i = 0; i < areasCount; i++) {
offset = decodeArea(buff, offset, areas);
const type: EncodedSemanticTokensType = buff.readUInt8(offset); offset += 1;
if (type === EncodedSemanticTokensType.Full) {
const length = buff.readUInt32BE(offset); offset += 4;
const data = new Uint32Array(length);
for (let j = 0; j < length; j++) {
data[j] = buff.readUInt32BE(offset); offset += 4;
}
return {
id: id,
type: 'full',
data: data
};
}
const deltaCount = buff.readUInt32BE(offset); offset += 4;
let deltas: { start: number; deleteCount: number; data?: Uint32Array; }[] = [];
for (let i = 0; i < deltaCount; i++) {
const start = buff.readUInt32BE(offset); offset += 4;
const deleteCount = buff.readUInt32BE(offset); offset += 4;
const length = buff.readUInt32BE(offset); offset += 4;
let data: Uint32Array | undefined;
if (length > 0) {
data = new Uint32Array(length);
for (let j = 0; j < length; j++) {
data[j] = buff.readUInt32BE(offset); offset += 4;
}
}
deltas[i] = { start, deleteCount, data };
}
return {
id: id,
areas: areas
type: 'delta',
deltas: deltas
};
}
function encodeArea(area: ISemanticTokensAreaDto, buff: VSBuffer, offset: number): number {
buff.writeUInt8(area.type === 'full' ? EncodedSemanticTokensAreaType.Full : EncodedSemanticTokensAreaType.Delta, offset); offset += 1;
buff.writeUInt32BE(area.line + 1, offset); offset += 4;
if (area.type === 'full') {
const tokens = area.data;
const tokenCount = (tokens.length / 5) | 0;
buff.writeUInt32BE(tokenCount, offset); offset += 4;
// here we are explicitly iterating and writing the ints again to ensure writing the desired endianness.
for (let i = 0; i < tokenCount; i++) {
const tokenOffset = 5 * i;
buff.writeUInt32BE(tokens[tokenOffset], offset); offset += 4;
buff.writeUInt32BE(tokens[tokenOffset + 1], offset); offset += 4;
buff.writeUInt32BE(tokens[tokenOffset + 2], offset); offset += 4;
buff.writeUInt32BE(tokens[tokenOffset + 3], offset); offset += 4;
buff.writeUInt32BE(tokens[tokenOffset + 4], offset); offset += 4;
}
// buff.set(VSBuffer.wrap(uint8), offset); offset += area.data.byteLength;
} else {
buff.writeUInt32BE(area.oldIndex, offset); offset += 4;
}
return offset;
}
function encodedAreaSize(area: ISemanticTokensAreaDto): number {
let result = 0;
result += 1; // type
result += 4; // line
if (area.type === 'full') {
const tokens = area.data;
const tokenCount = (tokens.length / 5) | 0;
result += 4; // token count
result += tokenCount * 5 * 4;
return result;
} else {
result += 4; // old index
return result;
}
}
function decodeArea(buff: VSBuffer, offset: number, areas: ISemanticTokensAreaDto[]): number {
const type: EncodedSemanticTokensAreaType = buff.readUInt8(offset); offset += 1;
const line = buff.readUInt32BE(offset); offset += 4;
if (type === EncodedSemanticTokensAreaType.Full) {
// here we are explicitly iterating and reading the ints again to ensure reading the desired endianness.
const tokenCount = buff.readUInt32BE(offset); offset += 4;
const data = new Uint32Array(5 * tokenCount);
for (let i = 0; i < tokenCount; i++) {
const destOffset = 5 * i;
data[destOffset] = buff.readUInt32BE(offset); offset += 4;
data[destOffset + 1] = buff.readUInt32BE(offset); offset += 4;
data[destOffset + 2] = buff.readUInt32BE(offset); offset += 4;
data[destOffset + 3] = buff.readUInt32BE(offset); offset += 4;
data[destOffset + 4] = buff.readUInt32BE(offset); offset += 4;
}
areas.push({
type: 'full',
line: line,
data: data
});
return offset;
} else {
const oldIndex = buff.readUInt32BE(offset); offset += 4;
areas.push({
type: 'delta',
line: line,
oldIndex: oldIndex
});
return offset;
}
}