diff --git a/extensions/vscode-api-tests/src/singlefolder-tests/chat.test.ts b/extensions/vscode-api-tests/src/singlefolder-tests/chat.test.ts
index 366da085b9b..000b12d5bab 100644
--- a/extensions/vscode-api-tests/src/singlefolder-tests/chat.test.ts
+++ b/extensions/vscode-api-tests/src/singlefolder-tests/chat.test.ts
@@ -15,8 +15,8 @@ suite('chat', () => {
 		disposables = [];
 
 		// Register a dummy default model which is required for a participant request to go through
-		disposables.push(lm.registerChatModelProvider('test-lm-vendor', {
-			async prepareLanguageModelChat(_options, _token) {
+		disposables.push(lm.registerLanguageModelChatProvider('test-lm-vendor', {
+			async prepareLanguageModelChatInformation(_options, _token) {
 				return [{
 					id: 'test-lm',
 					name: 'test-lm',
diff --git a/extensions/vscode-api-tests/src/singlefolder-tests/lm.test.ts b/extensions/vscode-api-tests/src/singlefolder-tests/lm.test.ts
index 74b9ecefef5..aa7df40da01 100644
--- a/extensions/vscode-api-tests/src/singlefolder-tests/lm.test.ts
+++ b/extensions/vscode-api-tests/src/singlefolder-tests/lm.test.ts
@@ -35,12 +35,12 @@ suite('lm', function () {
 
 	test('lm request and stream', async function () {
 
-		let p: vscode.Progress | undefined;
+		let p: vscode.Progress | undefined;
 		const defer = new DeferredPromise();
 
 		try {
-			disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
-				async prepareLanguageModelChat(_options, _token) {
+			disposables.push(vscode.lm.registerLanguageModelChatProvider('test-lm-vendor', {
+				async prepareLanguageModelChatInformation(_options, _token) {
 					return [testProviderOptions];
 				},
 				async provideLanguageModelChatResponse(_model, _messages, _options, progress, _token) {
@@ -79,7 +79,7 @@ suite('lm', function () {
 		assert.strictEqual(responseText, '');
 		assert.strictEqual(streamDone, false);
 
-		p.report({ index: 0, part: new vscode.LanguageModelTextPart('Hello') });
+		p.report(new vscode.LanguageModelTextPart('Hello'));
 		defer.complete();
 
 		await pp;
@@ -91,8 +91,8 @@ suite('lm', function () {
 
 	test('lm request fail', async function () {
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
-			async prepareLanguageModelChat(_options, _token) {
+		disposables.push(vscode.lm.registerLanguageModelChatProvider('test-lm-vendor', {
+			async prepareLanguageModelChatInformation(_options, _token) {
 				return [testProviderOptions];
 			},
 			async provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
@@ -118,8 +118,8 @@ suite('lm', function () {
 
 		const defer = new DeferredPromise();
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
-			async prepareLanguageModelChat(_options, _token) {
+		disposables.push(vscode.lm.registerLanguageModelChatProvider('test-lm-vendor', {
+			async prepareLanguageModelChatInformation(_options, _token) {
 				return [testProviderOptions];
 			},
 			async provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
@@ -156,8 +156,8 @@ suite('lm', function () {
 
 	test('LanguageModelError instance is not thrown to extensions#235322 (SYNC)', async function () {
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
-			async prepareLanguageModelChat(_options, _token) {
+		disposables.push(vscode.lm.registerLanguageModelChatProvider('test-lm-vendor', {
+			async prepareLanguageModelChatInformation(_options, _token) {
 				return [testProviderOptions];
 			},
 			provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
@@ -182,8 +182,8 @@ suite('lm', function () {
 
	test('LanguageModelError instance is not thrown to extensions#235322 (ASYNC)', async function () {
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
-			async prepareLanguageModelChat(_options, _token) {
+		disposables.push(vscode.lm.registerLanguageModelChatProvider('test-lm-vendor', {
+			async prepareLanguageModelChatInformation(_options, _token) {
 				return [testProviderOptions];
 			},
 			async provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
diff --git a/src/vs/workbench/api/browser/mainThreadLanguageModels.ts b/src/vs/workbench/api/browser/mainThreadLanguageModels.ts
index f7da7c62939..078a4954770 100644
--- a/src/vs/workbench/api/browser/mainThreadLanguageModels.ts
+++ b/src/vs/workbench/api/browser/mainThreadLanguageModels.ts
@@ -16,7 +16,7 @@ import { ExtensionIdentifier } from '../../../platform/extensions/common/extensi
 import { ILogService } from '../../../platform/log/common/log.js';
 import { resizeImage } from '../../contrib/chat/browser/imageUtils.js';
 import { ILanguageModelIgnoredFilesService } from '../../contrib/chat/common/ignoredFiles.js';
-import { IChatMessage, IChatResponseFragment, ILanguageModelChatResponse, ILanguageModelChatSelector, ILanguageModelsService } from '../../contrib/chat/common/languageModels.js';
+import { IChatMessage, IChatResponsePart, ILanguageModelChatResponse, ILanguageModelChatSelector, ILanguageModelsService } from '../../contrib/chat/common/languageModels.js';
 import { IAuthenticationAccessService } from '../../services/authentication/browser/authenticationAccessService.js';
 import { AuthenticationSession, AuthenticationSessionsChangeEvent, IAuthenticationProvider, IAuthenticationService, INTERNAL_AUTH_PROVIDER_PREFIX } from '../../services/authentication/common/authentication.js';
 import { IExtHostContext, extHostNamedCustomer } from '../../services/extensions/common/extHostCustomers.js';
@@ -32,7 +32,7 @@ export class MainThreadLanguageModels implements MainThreadLanguageModelsShape {
 	private readonly _store = new DisposableStore();
 	private readonly _providerRegistrations = new DisposableMap();
 	private readonly _lmProviderChange = new Emitter<{ vendor: string }>();
-	private readonly _pendingProgress = new Map<number, { defer: DeferredPromise<any>; stream: AsyncIterableSource<IChatResponseFragment> }>();
+	private readonly _pendingProgress = new Map<number, { defer: DeferredPromise<any>; stream: AsyncIterableSource<IChatResponsePart> }>();
 	private readonly _ignoredFileProviderRegistrations = new DisposableMap();
 
 	constructor(
@@ -70,7 +70,7 @@ export class MainThreadLanguageModels implements MainThreadLanguageModelsShape {
 			sendChatRequest: async (modelId, messages, from, options, token) => {
 				const requestId = (Math.random() * 1e6) | 0;
 				const defer = new DeferredPromise();
-				const stream = new AsyncIterableSource<IChatResponseFragment>();
+				const stream = new AsyncIterableSource<IChatResponsePart>();
 
 				try {
 					this._pendingProgress.set(requestId, { defer, stream });
@@ -103,7 +103,7 @@ export class MainThreadLanguageModels implements MainThreadLanguageModelsShape {
 		this._lmProviderChange.fire({ vendor });
 	}
 
-	async $reportResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponseFragment | IChatResponseFragment[]>): Promise<void> {
+	async $reportResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponsePart | IChatResponsePart[]>): Promise<void> {
 		const data = this._pendingProgress.get(requestId);
 		this._logService.trace('[LM] report response PART', Boolean(data), requestId, chunk);
 		if (data) {
diff --git a/src/vs/workbench/api/common/extHost.api.impl.ts b/src/vs/workbench/api/common/extHost.api.impl.ts
index c3503baaaef..c55e63ca03d 100644
--- a/src/vs/workbench/api/common/extHost.api.impl.ts
+++ b/src/vs/workbench/api/common/extHost.api.impl.ts
@@ -1533,9 +1533,9 @@ export function createApiFactoryAndRegisterActors(accessor: ServicesAccessor): I
 			onDidChangeChatModels: (listener, thisArgs?, disposables?) => {
 				return extHostLanguageModels.onDidChangeProviders(listener, thisArgs, disposables);
 			},
-			registerChatModelProvider: (vendor, provider) => {
+			registerLanguageModelChatProvider: (vendor, provider) => {
 				checkProposedApiEnabled(extension, 'chatProvider');
-				return extHostLanguageModels.registerLanguageModelProvider(extension, vendor, provider);
+				return extHostLanguageModels.registerLanguageModelChatProvider(extension, vendor, provider);
 			},
 			// --- embeddings
 			get embeddingModels() {
diff --git a/src/vs/workbench/api/common/extHost.protocol.ts b/src/vs/workbench/api/common/extHost.protocol.ts
index c32c6020f7c..700c1aae795 100644
--- a/src/vs/workbench/api/common/extHost.protocol.ts
+++ b/src/vs/workbench/api/common/extHost.protocol.ts
@@ -61,7 +61,7 @@ import { IChatContentInlineReference, IChatFollowup, IChatNotebookEdit, IChatPro
 import { IChatSessionItem } from '../../contrib/chat/common/chatSessionsService.js';
 import { IChatRequestVariableValue } from '../../contrib/chat/common/chatVariables.js';
 import { ChatAgentLocation } from '../../contrib/chat/common/constants.js';
-import { IChatMessage, IChatResponseFragment, ILanguageModelChatMetadataAndIdentifier, ILanguageModelChatSelector } from '../../contrib/chat/common/languageModels.js';
+import { IChatMessage, IChatResponsePart, ILanguageModelChatMetadataAndIdentifier, ILanguageModelChatSelector } from '../../contrib/chat/common/languageModels.js';
 import { IPreparedToolInvocation, IToolInvocation, IToolInvocationPreparationContext, IToolProgressStep, IToolResult, ToolDataSource } from '../../contrib/chat/common/languageModelToolsService.js';
 import { DebugConfigurationProviderTriggerKind, IAdapterDescriptor, IConfig, IDebugSessionReplMode, IDebugTestRunReference, IDebugVisualization, IDebugVisualizationContext, IDebugVisualizationTreeItem, MainThreadDebugVisualization } from '../../contrib/debug/common/debug.js';
 import { McpCollectionDefinition, McpConnectionState, McpServerDefinition, McpServerLaunch } from '../../contrib/mcp/common/mcpTypes.js';
@@ -1263,7 +1263,7 @@ export interface MainThreadLanguageModelsShape extends IDisposable {
 	$onLMProviderChange(vendor: string): void;
 	$unregisterProvider(vendor: string): void;
 	$tryStartChatRequest(extension: ExtensionIdentifier, modelIdentifier: string, requestId: number, messages: SerializableObjectWithBuffers, options: {}, token: CancellationToken): Promise;
-	$reportResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponseFragment | IChatResponseFragment[]>): Promise<void>;
+	$reportResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponsePart | IChatResponsePart[]>): Promise<void>;
 	$reportResponseDone(requestId: number, error: SerializedError | undefined): Promise;
 	$selectChatModels(selector: ILanguageModelChatSelector): Promise;
 	$countTokens(modelId: string, value: string | IChatMessage, token: CancellationToken): Promise;
@@ -1276,7 +1276,7 @@ export interface ExtHostLanguageModelsShape {
 	$prepareLanguageModelProvider(vendor: string, options: { silent: boolean }, token: CancellationToken): Promise;
 	$updateModelAccesslist(data: { from: ExtensionIdentifier; to: ExtensionIdentifier; enabled: boolean }[]): void;
 	$startChatRequest(modelId: string, requestId: number, from: ExtensionIdentifier, messages: SerializableObjectWithBuffers, options: { [name: string]: any }, token: CancellationToken): Promise;
-	$acceptResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponseFragment | IChatResponseFragment[]>): Promise<void>;
+	$acceptResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponsePart | IChatResponsePart[]>): Promise<void>;
 	$acceptResponseDone(requestId: number, error: SerializedError | undefined): Promise;
 	$provideTokenLength(modelId: string, value: string | IChatMessage, token: CancellationToken): Promise;
 	$isFileIgnored(handle: number, uri: UriComponents, token: CancellationToken): Promise;
diff --git a/src/vs/workbench/api/common/extHostLanguageModels.ts b/src/vs/workbench/api/common/extHostLanguageModels.ts
index 5121923d7b8..5af6665c282 100644
--- a/src/vs/workbench/api/common/extHostLanguageModels.ts
+++ b/src/vs/workbench/api/common/extHostLanguageModels.ts
@@ -17,7 +17,7 @@ import { ExtensionIdentifier, ExtensionIdentifierMap, ExtensionIdentifierSet, IE
 import { createDecorator } from '../../../platform/instantiation/common/instantiation.js';
 import { ILogService } from '../../../platform/log/common/log.js';
 import { Progress } from '../../../platform/progress/common/progress.js';
-import { IChatMessage, IChatResponseFragment, IChatResponsePart, ILanguageModelChatMetadata, ILanguageModelChatMetadataAndIdentifier } from '../../contrib/chat/common/languageModels.js';
+import { IChatMessage, IChatResponsePart, ILanguageModelChatMetadata, ILanguageModelChatMetadataAndIdentifier } from '../../contrib/chat/common/languageModels.js';
 import { DEFAULT_MODEL_PICKER_CATEGORY } from '../../contrib/chat/common/modelPicker/modelPickerWidget.js';
 import { INTERNAL_AUTH_PROVIDER_PREFIX } from '../../services/authentication/common/authentication.js';
 import { checkProposedApiEnabled } from '../../services/extensions/common/extensions.js';
@@ -35,28 +35,16 @@ export const IExtHostLanguageModels = createDecorator('I
 
 type LanguageModelProviderData = {
 	readonly extension: ExtensionIdentifier;
 	readonly extensionName: string;
-	readonly provider: vscode.LanguageModelChatProvider2;
+	readonly provider: vscode.LanguageModelChatProvider;
 };
 
 type LMResponsePart = vscode.LanguageModelTextPart | vscode.LanguageModelToolCallPart | vscode.LanguageModelDataPart | vscode.LanguageModelThinkingPart;
 
-class LanguageModelResponseStream {
-
-	readonly stream = new AsyncIterableSource<LMResponsePart>();
-
-	constructor(
-		readonly option: number,
-		stream?: AsyncIterableSource<LMResponsePart>
-	) {
-		this.stream = stream ?? new AsyncIterableSource<LMResponsePart>();
-	}
-}
 
 class LanguageModelResponse {
 
 	readonly apiObject: vscode.LanguageModelChatResponse;
 
-	private readonly _responseStreams = new Map<number, LanguageModelResponseStream>();
 	private readonly _defaultStream = new AsyncIterableSource<LMResponsePart>();
 	private _isDone: boolean = false;
@@ -80,72 +68,40 @@ class LanguageModelResponse {
 		};
 	}
 
-	private * _streams() {
-		if (this._responseStreams.size > 0) {
-			for (const [, value] of this._responseStreams) {
-				yield value.stream;
-			}
-		} else {
-			yield this._defaultStream;
-		}
-	}
-
-	handleFragment(fragments: IChatResponseFragment | IChatResponseFragment[]): void {
+	handleResponsePart(parts: IChatResponsePart | IChatResponsePart[]): void {
 		if (this._isDone) {
 			return;
 		}
-		const partsByIndex = new Map<number, LMResponsePart[]>();
+		const lmResponseParts: LMResponsePart[] = [];
 
-		for (const fragment of Iterable.wrap(fragments)) {
+		for (const part of Iterable.wrap(parts)) {
 			let out: LMResponsePart;
-			if (fragment.part.type === 'text') {
-				out = new extHostTypes.LanguageModelTextPart(fragment.part.value, fragment.part.audience);
-			} else if (fragment.part.type === 'thinking') {
-				out = new extHostTypes.LanguageModelThinkingPart(fragment.part.value, fragment.part.id, fragment.part.metadata);
+			if (part.type === 'text') {
+				out = new extHostTypes.LanguageModelTextPart(part.value, part.audience);
+			} else if (part.type === 'thinking') {
+				out = new extHostTypes.LanguageModelThinkingPart(part.value, part.id, part.metadata);
-			} else if (fragment.part.type === 'data') {
-				out = new extHostTypes.LanguageModelDataPart(fragment.part.data.buffer, fragment.part.mimeType, fragment.part.audience);
+			} else if (part.type === 'data') {
+				out = new extHostTypes.LanguageModelDataPart(part.data.buffer, part.mimeType, part.audience);
 			} else {
-				out = new extHostTypes.LanguageModelToolCallPart(fragment.part.toolCallId, fragment.part.name, fragment.part.parameters);
-			}
-			const array = partsByIndex.get(fragment.index);
-			if (!array) {
-				partsByIndex.set(fragment.index, [out]);
-			} else {
-				array.push(out);
+				out = new extHostTypes.LanguageModelToolCallPart(part.toolCallId, part.name, part.parameters);
 			}
+			lmResponseParts.push(out);
 		}
-
-		for (const [index, parts] of partsByIndex) {
-			let res = this._responseStreams.get(index);
-			if (!res) {
-				if (this._responseStreams.size === 0) {
-					// the first response claims the default response
-					res = new LanguageModelResponseStream(index, this._defaultStream);
-				} else {
-					res = new LanguageModelResponseStream(index);
-				}
-				this._responseStreams.set(index, res);
-			}
-			res.stream.emitMany(parts);
-		}
+		this._defaultStream.emitMany(lmResponseParts);
 	}
 
 	reject(err: Error): void {
 		this._isDone = true;
-		for (const stream of this._streams()) {
-			stream.reject(err);
-		}
+		this._defaultStream.reject(err);
 	}
 
 	resolve(): void {
 		this._isDone = true;
-		for (const stream of this._streams()) {
-			stream.resolve();
-		}
+		this._defaultStream.resolve();
 	}
 }
@@ -180,14 +136,14 @@ export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {
 		this._onDidChangeProviders.dispose();
 	}
 
-	registerLanguageModelProvider(extension: IExtensionDescription, vendor: string, provider: vscode.LanguageModelChatProvider2): IDisposable {
+	registerLanguageModelChatProvider(extension: IExtensionDescription, vendor: string, provider: vscode.LanguageModelChatProvider): IDisposable {
 		this._languageModelProviders.set(vendor, { extension: extension.identifier, extensionName: extension.displayName || extension.name, provider });
 		this._proxy.$registerLanguageModelProvider(vendor);
 		let providerChangeEventDisposable: IDisposable | undefined;
-		if (provider.onDidChange) {
-			providerChangeEventDisposable = provider.onDidChange(() => {
+		if (provider.onDidChangeLanguageModelInformation) {
+			providerChangeEventDisposable = provider.onDidChangeLanguageModelInformation(() => {
 				this._proxy.$onLMProviderChange(vendor);
 			});
 		}
@@ -215,7 +171,7 @@ export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {
 			return [];
 		}
 		this._clearModelCache(vendor);
-		const modelInformation = await data.provider.prepareLanguageModelChat(options, token) ?? [];
+		const modelInformation = await data.provider.prepareLanguageModelChatInformation(options, token) ?? [];
 		const modelMetadataAndIdentifier: ILanguageModelChatMetadataAndIdentifier[] = modelInformation.map(m => {
 			let auth;
 			if (m.auth) {
@@ -231,8 +187,8 @@ export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {
 					vendor,
 					name: m.name ?? '',
 					family: m.family ?? '',
-					cost: m.cost,
-					description: m.description,
+					detail: m.detail,
+					tooltip: m.tooltip,
 					version: m.version,
 					maxInputTokens: m.maxInputTokens,
 					maxOutputTokens: m.maxOutputTokens,
@@ -272,7 +228,7 @@ export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {
 			throw new Error(`Language model provider for '${knownModel.metadata.id}' not found.`);
 		}
 
-		const queue: IChatResponseFragment[] = [];
+		const queue: IChatResponsePart[] = [];
 		const sendNow = () => {
 			if (queue.length > 0) {
 				this._proxy.$reportResponsePart(requestId, new SerializableObjectWithBuffers(queue));
@@ -280,7 +236,7 @@ export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {
 			}
 		};
 		const queueScheduler = new RunOnceScheduler(sendNow, 30);
-		const sendSoon = (part: IChatResponseFragment) => {
+		const sendSoon = (part: IChatResponsePart) => {
 			const newLen = queue.push(part);
 			// flush/send if things pile up more than expected
 			if (newLen > 30) {
@@ -291,21 +247,21 @@ export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {
 			}
 		};
 
-		const progress = new Progress(async fragment => {
+		const progress = new Progress(async fragment => {
 			if (token.isCancellationRequested) {
 				this._logService.warn(`[CHAT](${data.extension.value}) CANNOT send progress because the REQUEST IS CANCELLED`);
 				return;
 			}
 
 			let part: IChatResponsePart | undefined;
-			if (fragment.part instanceof extHostTypes.LanguageModelToolCallPart) {
-				part = { type: 'tool_use', name: fragment.part.name, parameters: fragment.part.input, toolCallId: fragment.part.callId };
-			} else if (fragment.part instanceof extHostTypes.LanguageModelTextPart) {
-				part = { type: 'text', value: fragment.part.value, audience: fragment.part.audience };
-			} else if (fragment.part instanceof extHostTypes.LanguageModelDataPart) {
-				part = { type: 'data', mimeType: fragment.part.mimeType, data: VSBuffer.wrap(fragment.part.data), audience: fragment.part.audience };
-			} else if (fragment.part instanceof extHostTypes.LanguageModelThinkingPart) {
-				part = { type: 'thinking', value: fragment.part.value, id: fragment.part.id, metadata: fragment.part.metadata };
+			if (fragment instanceof extHostTypes.LanguageModelToolCallPart) {
+				part = { type: 'tool_use', name: fragment.name, parameters: fragment.input, toolCallId: fragment.callId };
+			} else if (fragment instanceof extHostTypes.LanguageModelTextPart) {
+				part = { type: 'text', value: fragment.value, audience: fragment.audience };
+			} else if (fragment instanceof extHostTypes.LanguageModelDataPart) {
+				part = { type: 'data', mimeType: fragment.mimeType, data: VSBuffer.wrap(fragment.data), audience: fragment.audience };
+			} else if (fragment instanceof extHostTypes.LanguageModelThinkingPart) {
+				part = { type: 'thinking', value: fragment.value, id: fragment.id, metadata: fragment.metadata };
 			}
 
 			if (!part) {
@@ -313,7 +269,7 @@ export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {
 				return;
 			}
 
-			sendSoon({ index: fragment.index, part });
+			sendSoon(part);
 		});
 
 		let value: unknown;
@@ -490,10 +446,10 @@ export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {
 		return internalMessages;
 	}
 
-	async $acceptResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponseFragment | IChatResponseFragment[]>): Promise<void> {
+	async $acceptResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponsePart | IChatResponsePart[]>): Promise<void> {
 		const data = this._pendingRequest.get(requestId);
 		if (data) {
-			data.res.handleFragment(chunk.value);
+			data.res.handleResponsePart(chunk.value);
 		}
 	}
diff --git a/src/vs/workbench/contrib/chat/browser/modelPicker/modelPickerActionItem.ts b/src/vs/workbench/contrib/chat/browser/modelPicker/modelPickerActionItem.ts
index 36c33f72f0e..2c0b0fd327a 100644
--- a/src/vs/workbench/contrib/chat/browser/modelPicker/modelPickerActionItem.ts
+++ b/src/vs/workbench/contrib/chat/browser/modelPicker/modelPickerActionItem.ts
@@ -38,8 +38,8 @@ function modelDelegateToWidgetActionsProvider(delegate: IModelPickerDelegate): I
 				checked: model.identifier === delegate.getCurrentModel()?.identifier,
 				category: model.metadata.modelPickerCategory || DEFAULT_MODEL_PICKER_CATEGORY,
 				class: undefined,
-				description: model.metadata.cost,
-				tooltip: model.metadata.description ?? model.metadata.name,
+				description: model.metadata.detail,
+				tooltip: model.metadata.tooltip ?? model.metadata.name,
 				label: model.metadata.name,
 				run: () => {
 					delegate.setModel(model);
diff --git a/src/vs/workbench/contrib/chat/common/languageModels.ts b/src/vs/workbench/contrib/chat/common/languageModels.ts
index d4678314284..3a5b58e0fd3 100644
--- a/src/vs/workbench/contrib/chat/common/languageModels.ts
+++ b/src/vs/workbench/contrib/chat/common/languageModels.ts
@@ -143,11 +143,6 @@ export type IChatResponsePart = IChatResponseTextPart | IChatResponseToolUsePart
 
 export type IExtendedChatResponsePart = IChatResponsePullRequestPart;
 
-export interface IChatResponseFragment {
-	index: number;
-	part: IChatResponsePart;
-}
-
 export interface ILanguageModelChatMetadata {
 
 	readonly extension: ExtensionIdentifier;
@@ -155,8 +150,8 @@ export interface ILanguageModelChatMetadata {
 	readonly id: string;
 	readonly vendor: string;
 	readonly version: string;
-	readonly description?: string;
-	readonly cost?: string;
+	readonly tooltip?: string;
+	readonly detail?: string;
 	readonly family: string;
 	readonly maxInputTokens: number;
 	readonly maxOutputTokens: number;
@@ -191,7 +186,7 @@ }
 
 export interface ILanguageModelChatResponse {
-	stream: AsyncIterable<IChatResponseFragment | IChatResponseFragment[]>;
+	stream: AsyncIterable<IChatResponsePart | IChatResponsePart[]>;
 	result: Promise;
 }
diff --git a/src/vs/workbench/contrib/chat/common/promptSyntax/languageProviders/promptHeaderHovers.ts b/src/vs/workbench/contrib/chat/common/promptSyntax/languageProviders/promptHeaderHovers.ts
index a8505faeb8e..2c38cb53c2b 100644
--- a/src/vs/workbench/contrib/chat/common/promptSyntax/languageProviders/promptHeaderHovers.ts
+++ b/src/vs/workbench/contrib/chat/common/promptSyntax/languageProviders/promptHeaderHovers.ts
@@ -162,8 +162,8 @@ export class PromptHeaderHoverProvider extends Disposable implements HoverProvid
 				lines.push(localize('modelName', '- Name: {0}', meta.name));
 				lines.push(localize('modelFamily', '- Family: {0}', meta.family));
 				lines.push(localize('modelVendor', '- Vendor: {0}', meta.vendor));
-				if (meta.description) {
-					lines.push('', '', meta.description);
+				if (meta.tooltip) {
+					lines.push('', '', meta.tooltip);
 				}
 				return this.createHover(lines.join('\n'), range);
 			}
diff --git a/src/vs/workbench/contrib/chat/test/common/languageModels.test.ts b/src/vs/workbench/contrib/chat/test/common/languageModels.test.ts
index a0b89049f1a..8c156b6b00d 100644
--- a/src/vs/workbench/contrib/chat/test/common/languageModels.test.ts
+++ b/src/vs/workbench/contrib/chat/test/common/languageModels.test.ts
@@ -10,7 +10,7 @@ import { DisposableStore } from '../../../../../base/common/lifecycle.js';
 import { mock } from '../../../../../base/test/common/mock.js';
 import { ensureNoDisposablesAreLeakedInTestSuite } from '../../../../../base/test/common/utils.js';
 import { NullLogService } from '../../../../../platform/log/common/log.js';
-import { ChatMessageRole, IChatResponseFragment, languageModelExtensionPoint, LanguageModelsService, IChatMessage } from '../../common/languageModels.js';
+import { ChatMessageRole, languageModelExtensionPoint, LanguageModelsService, IChatMessage, IChatResponsePart } from '../../common/languageModels.js';
 import { IExtensionService, nullExtensionDescription } from '../../../../services/extensions/common/extensions.js';
 import { ExtensionsRegistry } from '../../../../services/extensions/common/extensionsRegistry.js';
 import { DEFAULT_MODEL_PICKER_CATEGORY } from '../../common/modelPicker/modelPickerWidget.js';
@@ -152,11 +152,11 @@ suite('LanguageModels', function () {
 			// const message = messages.at(-1);
 
 			const defer = new DeferredPromise();
-			const stream = new AsyncIterableSource<IChatResponseFragment>();
+			const stream = new AsyncIterableSource<IChatResponsePart>();
 
 			(async () => {
 				while (!token.isCancellationRequested) {
-					stream.emitOne({ index: 0, part: { type: 'text', value: Date.now().toString() } });
+					stream.emitOne({ type: 'text', value: Date.now().toString() });
 					await timeout(10);
 				}
 				defer.complete(undefined);
diff --git a/src/vs/workbench/contrib/mcp/browser/mcpCommands.ts b/src/vs/workbench/contrib/mcp/browser/mcpCommands.ts
index 7421cbcda4d..74835e0bcd3 100644
--- a/src/vs/workbench/contrib/mcp/browser/mcpCommands.ts
+++ b/src/vs/workbench/contrib/mcp/browser/mcpCommands.ts
@@ -1055,7 +1055,7 @@ export class McpConfigureSamplingModels extends Action2 {
 			}
 			return {
 				label: model.name,
-				description: model.description,
+				description: model.tooltip,
 				id,
 				picked: existingIds.size ? existingIds.has(id) : model.isDefault,
 			};
diff --git a/src/vs/workbench/contrib/mcp/common/mcpSamplingService.ts b/src/vs/workbench/contrib/mcp/common/mcpSamplingService.ts
index f7b7aa14dde..1bb6cc8219a 100644
--- a/src/vs/workbench/contrib/mcp/common/mcpSamplingService.ts
+++ b/src/vs/workbench/contrib/mcp/common/mcpSamplingService.ts
@@ -84,12 +84,12 @@ export class McpSamplingService extends Disposable implements IMcpSamplingServic
 			for await (const part of response.stream) {
 				if (Array.isArray(part)) {
 					for (const p of part) {
-						if (p.part.type === 'text') {
-							responseText += p.part.value;
+						if (p.type === 'text') {
+							responseText += p.value;
 						}
 					}
-				} else if (part.part.type === 'text') {
-					responseText += part.part.value;
+				} else if (part.type === 'text') {
+					responseText += part.value;
 				}
 			}
 		})();
diff --git a/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/bufferOutputPolling.ts b/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/bufferOutputPolling.ts
index 88ceadc1c95..f6203789a53 100644
--- a/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/bufferOutputPolling.ts
+++ b/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/bufferOutputPolling.ts
@@ -240,12 +240,12 @@ export async function assessOutputForErrors(buffer: string, token: CancellationT
 			for await (const part of response.stream) {
 				if (Array.isArray(part)) {
 					for (const p of part) {
-						if (p.part.type === 'text') {
-							responseText += p.part.value;
+						if (p.type === 'text') {
+							responseText += p.value;
 						}
 					}
-				} else if (part.part.type === 'text') {
-					responseText += part.part.value;
+				} else if (part.type === 'text') {
+					responseText += part.value;
 				}
 			}
 		})();
diff --git a/src/vscode-dts/vscode.proposed.chatProvider.d.ts b/src/vscode-dts/vscode.proposed.chatProvider.d.ts
index ac0d1b92143..fb69aaa93a9 100644
--- a/src/vscode-dts/vscode.proposed.chatProvider.d.ts
+++ b/src/vscode-dts/vscode.proposed.chatProvider.d.ts
@@ -5,14 +5,6 @@
 
 declare module 'vscode' {
 
-
-	// @API extension ship a d.ts files for their options
-
-	// @API the LanguageModelChatProvider2 is an alternative that combines a source, like ollama etc, with
-	// concrete models. The `provideLanguageModelChatData` would do the discovery and auth dances and later
-	// the model data is passed to the concrete function for making a requested or counting token
-
-	// TODO@API name scheme
 	export interface LanguageModelChatRequestHandleOptions {
@@ -44,7 +36,6 @@ declare module 'vscode' {
 		toolMode?: LanguageModelChatToolMode;
 	}
 
-	// TODO@API names: LanguageModelChatMetadata, LanguageModelChatItem
 	export interface LanguageModelChatInformation {
 
 		readonly id: string;
@@ -60,14 +51,14 @@ declare module 'vscode' {
 		readonly family: string;
 
 		/**
-		 * An optional, human-readable description of the language model.
+		 * The tooltip to render when hovering the model
 		 */
-		readonly description?: string;
+		readonly tooltip?: string;
 
 		/**
-		 * An optional, human-readable string representing the cost of using the language model.
+		 * An optional, human-readable string which will be rendered alongside the model.
 		 */
-		readonly cost?: string;
+		readonly detail?: string;
 
 		/**
		 * Opaque version string of the model. This is defined by the extension contributing the language model
@@ -101,44 +92,56 @@
			// TODO@API should be `boolean | number` so extensions can express how many tools they support
			readonly toolCalling?: boolean | number;
 
-			// TODO@API DO NOT SUPPORT THIS
-			// readonly agentMode?: boolean;
-
-			// TODO@API support prompt TSX style messages, MAYBE leave it out for now
-			readonly promptTsx?: boolean;
		};
 
		/**
		 * Optional category to group models by in the model picker.
		 * The lower the order, the higher the category appears in the list.
		 * Has no effect if `isUserSelectable` is `false`.
-		 * If not specified, the model will appear in the "Other Models" category.
		 */
		readonly category?: { label: string; order: number };
	}
 
-	export interface LanguageModelChatProvider2<T extends LanguageModelChatInformation = LanguageModelChatInformation> {
+	/**
+	 * The provider version of @link {LanguageModelChatMessage}.
+	 */
+	export interface LanguageModelChatRequestMessage {
+		/**
+		 * The role of this message.
+		 */
+		readonly role: LanguageModelChatMessageRole;
+
+		/**
+		 * A string or heterogeneous array of things that a message can contain as content. Some parts may be message-type
+		 * specific for some models.
+		 */
+		readonly content: Array;
+
+		/**
+		 * The optional name of a user for this message.
+		 */
+		readonly name: string | undefined;
+	}
+
+	export interface LanguageModelChatProvider<T extends LanguageModelChatInformation = LanguageModelChatInformation> {
 
		// signals a change from the provider to the editor so that prepareLanguageModelChat is called again
-		onDidChange?: Event<void>;
+		onDidChangeLanguageModelInformation?: Event<void>;
 
		// NOT cacheable (between reloads)
-		prepareLanguageModelChat(options: { silent: boolean }, token: CancellationToken): ProviderResult<T[]>;
+		prepareLanguageModelChatInformation(options: PrepareLMChatModelOptions, token: CancellationToken): ProviderResult<T[]>;
 
-		provideLanguageModelChatResponse(model: T, messages: Array, options: LanguageModelChatRequestHandleOptions, progress: Progress, token: CancellationToken): Thenable<void>;
+		provideLanguageModelChatResponse(model: T, messages: Array<LanguageModelChatRequestMessage>, options: LanguageModelChatRequestHandleOptions, progress: Progress, token: CancellationToken): Thenable<void>;
 
-		provideTokenCount(model: T, text: string | LanguageModelChatMessage | LanguageModelChatMessage2, token: CancellationToken): Thenable<number>;
+		provideTokenCount(model: T, text: string | LanguageModelChatRequestMessage, token: CancellationToken): Thenable<number>;
	}
 
	export namespace lm {
-		export function registerChatModelProvider(vendor: string, provider: LanguageModelChatProvider2): Disposable;
+		export function registerLanguageModelChatProvider(vendor: string, provider: LanguageModelChatProvider): Disposable;
	}
 
-
-
-	export interface ChatResponseFragment2 {
-		index: number;
-		part: LanguageModelTextPart | LanguageModelToolCallPart | LanguageModelDataPart | LanguageModelThinkingPart;
+	export interface PrepareLMChatModelOptions {
+		silent: boolean;
	}
 }
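
Reviewer note: for context on the rename, below is a minimal sketch of what an extension adopting the new LanguageModelChatProvider surface could look like. The vendor id ('my-vendor'), the model metadata and the echo/token-estimate logic are invented for illustration, and the exact set of required LanguageModelChatInformation fields follows the chatProvider proposal rather than this sketch. It assumes the chatProvider proposed API is enabled for the extension.

// Sketch only — hypothetical vendor id, model id and behaviour; not part of this change.
import * as vscode from 'vscode';

export function activate(context: vscode.ExtensionContext) {
	const provider: vscode.LanguageModelChatProvider = {
		// Model discovery; `options.silent` tells the provider whether it may prompt for auth.
		async prepareLanguageModelChatInformation(_options, _token) {
			return [{
				id: 'echo-1',                 // hypothetical model id
				name: 'Echo',
				family: 'echo',
				version: '1.0.0',
				maxInputTokens: 4096,
				maxOutputTokens: 1024,
				detail: 'Free',               // rendered alongside the model in the picker
				tooltip: 'Echoes the last user message'
			}];
		},

		// Parts are reported directly now; the old `{ index, part }` envelope is gone.
		async provideLanguageModelChatResponse(_model, messages, _options, progress, _token) {
			const last = messages[messages.length - 1];
			const text = (last?.content ?? [])
				.map(c => c instanceof vscode.LanguageModelTextPart ? c.value : typeof c === 'string' ? c : '')
				.join('');
			progress.report(new vscode.LanguageModelTextPart(text || 'hello'));
		},

		// Crude token estimate; a real provider would use its own tokenizer.
		async provideTokenCount(_model, text, _token) {
			return typeof text === 'string' ? Math.ceil(text.length / 4) : 1;
		}
	};

	context.subscriptions.push(vscode.lm.registerLanguageModelChatProvider('my-vendor', provider));
}

This mirrors what the updated lm.test.ts fixtures do, with the full registration path spelled out.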
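
On the workbench side, the same un-wrapping shows up wherever a response stream is drained; the MCP sampling service and the terminal output poller above now share the pattern below. A condensed sketch — the import path and the collectText helper name are illustrative, while the part handling follows the IChatResponsePart union from languageModels.ts:

import { IChatResponsePart, ILanguageModelChatResponse } from './languageModels.js'; // illustrative path

// Parts now arrive either singly or batched, without the old `{ index, part }` envelope.
async function collectText(response: ILanguageModelChatResponse): Promise<string> {
	let responseText = '';
	for await (const part of response.stream) {
		for (const p of Array.isArray(part) ? part : [part]) {
			if (p.type === 'text') {
				responseText += p.value;
			}
		}
	}
	return responseText;
}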