diff --git a/src/vs/workbench/api/browser/mainThreadLanguageModelTools.ts b/src/vs/workbench/api/browser/mainThreadLanguageModelTools.ts index de592ed2c5a..257a59aa9ed 100644 --- a/src/vs/workbench/api/browser/mainThreadLanguageModelTools.ts +++ b/src/vs/workbench/api/browser/mainThreadLanguageModelTools.ts @@ -59,8 +59,7 @@ export class MainThreadLanguageModelTools extends Disposable implements MainThre this._countTokenCallbacks.delete(dto.callId); } }, - provideToolConfirmationMessages: (participantName, parameters, token) => this._proxy.$provideToolConfirmationMessages(id, participantName, parameters, token), - provideToolInvocationMessage: (parameters, token) => this._proxy.$provideToolInvocationMessage(id, parameters, token), + prepareToolInvocation: (participantName, parameters, token) => this._proxy.$prepareToolInvocation(id, participantName, parameters, token), }); this._tools.set(id, disposable); } diff --git a/src/vs/workbench/api/common/extHost.api.impl.ts b/src/vs/workbench/api/common/extHost.api.impl.ts index 4a7e02cdf34..7afaa349233 100644 --- a/src/vs/workbench/api/common/extHost.api.impl.ts +++ b/src/vs/workbench/api/common/extHost.api.impl.ts @@ -1491,11 +1491,11 @@ export function createApiFactoryAndRegisterActors(accessor: ServicesAccessor): I return extHostEmbeddings.computeEmbeddings(embeddingsModel, input, token); } }, - registerTool(toolId: string, tool: vscode.LanguageModelTool) { + registerTool(toolId: string, tool: vscode.LanguageModelTool) { checkProposedApiEnabled(extension, 'lmTools'); return extHostLanguageModelTools.registerTool(extension, toolId, tool); }, - invokeTool(toolId: string, parameters: vscode.LanguageModelToolInvocationOptions, token: vscode.CancellationToken) { + invokeTool(toolId: string, parameters: vscode.LanguageModelToolInvocationOptions, token: vscode.CancellationToken) { checkProposedApiEnabled(extension, 'lmTools'); return extHostLanguageModelTools.invokeTool(toolId, parameters, token); }, diff --git 
a/src/vs/workbench/api/common/extHost.protocol.ts b/src/vs/workbench/api/common/extHost.protocol.ts index 3b5b50d0983..7489b7d0c5c 100644 --- a/src/vs/workbench/api/common/extHost.protocol.ts +++ b/src/vs/workbench/api/common/extHost.protocol.ts @@ -56,7 +56,7 @@ import { IChatProgressHistoryResponseContent } from '../../contrib/chat/common/c import { IChatContentInlineReference, IChatFollowup, IChatProgress, IChatResponseErrorDetails, IChatTask, IChatTaskDto, IChatUserActionEvent, IChatVoteAction } from '../../contrib/chat/common/chatService.js'; import { IChatRequestVariableValue, IChatVariableData, IChatVariableResolverProgress } from '../../contrib/chat/common/chatVariables.js'; import { IChatMessage, IChatResponseFragment, ILanguageModelChatMetadata, ILanguageModelChatSelector, ILanguageModelsChangeEvent } from '../../contrib/chat/common/languageModels.js'; -import { IToolConfirmationMessages, IToolData, IToolInvocation, IToolResult } from '../../contrib/chat/common/languageModelToolsService.js'; +import { IPreparedToolInvocation, IToolData, IToolInvocation, IToolResult } from '../../contrib/chat/common/languageModelToolsService.js'; import { DebugConfigurationProviderTriggerKind, IAdapterDescriptor, IConfig, IDebugSessionReplMode, IDebugTestRunReference, IDebugVisualization, IDebugVisualizationContext, IDebugVisualizationTreeItem, MainThreadDebugVisualization } from '../../contrib/debug/common/debug.js'; import * as notebookCommon from '../../contrib/notebook/common/notebookCommon.js'; import { CellExecutionUpdateType } from '../../contrib/notebook/common/notebookExecutionService.js'; @@ -1369,8 +1369,7 @@ export interface ExtHostLanguageModelToolsShape { $invokeTool(dto: IToolInvocation, token: CancellationToken): Promise; $countTokensForInvocation(callId: string, input: string, token: CancellationToken): Promise; - $provideToolConfirmationMessages(toolId: string, participantName: string, parameters: any, token: CancellationToken): Promise; - 
$provideToolInvocationMessage(toolId: string, parameters: any, token: CancellationToken): Promise; + $prepareToolInvocation(toolId: string, participantName: string, parameters: any, token: CancellationToken): Promise; } export interface MainThreadUrlsShape extends IDisposable { diff --git a/src/vs/workbench/api/common/extHostLanguageModelTools.ts b/src/vs/workbench/api/common/extHostLanguageModelTools.ts index dd9cb929933..21240737888 100644 --- a/src/vs/workbench/api/common/extHostLanguageModelTools.ts +++ b/src/vs/workbench/api/common/extHostLanguageModelTools.ts @@ -3,6 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +import type * as vscode from 'vscode'; import { raceCancellation } from '../../../base/common/async.js'; import { CancellationToken } from '../../../base/common/cancellation.js'; import { CancellationError } from '../../../base/common/errors.js'; @@ -10,14 +11,13 @@ import { IDisposable, toDisposable } from '../../../base/common/lifecycle.js'; import { revive } from '../../../base/common/marshalling.js'; import { generateUuid } from '../../../base/common/uuid.js'; import { IExtensionDescription } from '../../../platform/extensions/common/extensions.js'; +import { IPreparedToolInvocation, IToolInvocation, IToolInvocationContext, IToolResult } from '../../contrib/chat/common/languageModelToolsService.js'; import { ExtHostLanguageModelToolsShape, IMainContext, IToolDataDto, MainContext, MainThreadLanguageModelToolsShape } from './extHost.protocol.js'; import * as typeConvert from './extHostTypeConverters.js'; -import { IToolConfirmationMessages, IToolInvocation, IToolInvocationContext, IToolResult } from '../../contrib/chat/common/languageModelToolsService.js'; -import type * as vscode from 'vscode'; export class ExtHostLanguageModelTools implements ExtHostLanguageModelToolsShape { /** A map of tools 
that were registered in this EH */ - private readonly _registeredTools = new Map(); + private readonly _registeredTools = new Map }>(); private readonly _proxy: MainThreadLanguageModelToolsShape; private readonly _tokenCountFuncs = new Map Thenable>(); @@ -43,7 +43,7 @@ export class ExtHostLanguageModelTools implements ExtHostLanguageModelToolsShape return await fn(input, token); } - async invokeTool(toolId: string, options: vscode.LanguageModelToolInvocationOptions, token: CancellationToken): Promise { + async invokeTool(toolId: string, options: vscode.LanguageModelToolInvocationOptions, token: CancellationToken): Promise { if (!options.requestedContentTypes?.length) { throw new Error('LanguageModelToolInvocationOptions.requestedContentTypes is required to be set'); } @@ -86,7 +86,7 @@ export class ExtHostLanguageModelTools implements ExtHostLanguageModelToolsShape throw new Error(`Unknown tool ${dto.toolId}`); } - const options: vscode.LanguageModelToolInvocationOptions = { parameters: dto.parameters, toolInvocationToken: dto.context, requestedContentTypes: dto.requestedContentTypes }; + const options: vscode.LanguageModelToolInvocationOptions = { parameters: dto.parameters, toolInvocationToken: dto.context, requestedContentTypes: dto.requestedContentTypes }; if (dto.tokenBudget !== undefined) { options.tokenOptions = { tokenBudget: dto.tokenBudget, @@ -121,41 +121,31 @@ export class ExtHostLanguageModelTools implements ExtHostLanguageModelToolsShape return extensionResult; } - async $provideToolConfirmationMessages(toolId: string, participantName: string, parameters: any, token: CancellationToken): Promise { + async $prepareToolInvocation(toolId: string, participantName: string, parameters: any, token: CancellationToken): Promise { const item = this._registeredTools.get(toolId); if (!item) { throw new Error(`Unknown tool ${toolId}`); } - if (!item.tool.provideToolConfirmationMessages) { + if (!item.tool.prepareToolInvocation) { return undefined; } - const result 
= await item.tool.provideToolConfirmationMessages({ participantName, parameters }, token); + const result = await item.tool.prepareToolInvocation({ participantName, parameters }, token); if (!result) { return undefined; } return { - title: result.title, - message: typeof result.message === 'string' ? result.message : typeConvert.MarkdownString.from(result.message), + confirmationMessages: result.confirmationMessages ? { + title: result.confirmationMessages.title, + message: typeof result.confirmationMessages.message === 'string' ? result.confirmationMessages.message : typeConvert.MarkdownString.from(result.confirmationMessages.message), + } : undefined, + invocationMessage: result.invocationMessage }; } - async $provideToolInvocationMessage(toolId: string, parameters: any, token: CancellationToken): Promise { - const item = this._registeredTools.get(toolId); - if (!item) { - throw new Error(`Unknown tool ${toolId}`); - } - - if (!item.tool.provideToolInvocationMessage) { - return undefined; - } - - return await item.tool.provideToolInvocationMessage(parameters, token); - } - - registerTool(extension: IExtensionDescription, id: string, tool: vscode.LanguageModelTool): IDisposable { + registerTool(extension: IExtensionDescription, id: string, tool: vscode.LanguageModelTool): IDisposable { this._registeredTools.set(id, { extension, tool }); this._proxy.$registerTool(id); diff --git a/src/vs/workbench/api/common/extHostTypeConverters.ts b/src/vs/workbench/api/common/extHostTypeConverters.ts index e2de5c60f4b..e67f9c22340 100644 --- a/src/vs/workbench/api/common/extHostTypeConverters.ts +++ b/src/vs/workbench/api/common/extHostTypeConverters.ts @@ -2926,10 +2926,11 @@ export namespace LanguageModelToolDescription { export function to(item: IToolData): vscode.LanguageModelToolDescription { return { id: item.id, - modelDescription: item.modelDescription, + description: item.modelDescription, parametersSchema: item.parametersSchema, displayName: item.displayName, 
supportedContentTypes: item.supportedContentTypes, + tags: item.tags ?? [], }; } } diff --git a/src/vs/workbench/contrib/chat/common/languageModelToolsService.ts b/src/vs/workbench/contrib/chat/common/languageModelToolsService.ts index 610b22e32ea..0cccc3125fe 100644 --- a/src/vs/workbench/contrib/chat/common/languageModelToolsService.ts +++ b/src/vs/workbench/contrib/chat/common/languageModelToolsService.ts @@ -26,6 +26,7 @@ export interface IToolData { name?: string; icon?: { dark: URI; light?: URI } | ThemeIcon; when?: ContextKeyExpression; + tags?: string[]; displayName?: string; userDescription?: string; modelDescription: string; @@ -43,7 +44,7 @@ interface IToolEntry { export interface IToolInvocation { callId: string; toolId: string; - parameters: any; + parameters: Object; tokenBudget?: number; context: IToolInvocationContext | undefined; requestedContentTypes: string[]; @@ -62,10 +63,14 @@ export interface IToolConfirmationMessages { message: string | IMarkdownString; } +export interface IPreparedToolInvocation { + invocationMessage?: string; + confirmationMessages?: IToolConfirmationMessages; +} + export interface IToolImpl { invoke(invocation: IToolInvocation, countTokens: CountTokensCallback, token: CancellationToken): Promise; - provideToolConfirmationMessages(participantName: string, parameters: any, token: CancellationToken): Promise; - provideToolInvocationMessage(parameters: any, token: CancellationToken): Promise; + prepareToolInvocation?(participantName: string, parameters: any, token: CancellationToken): Promise; } export const ILanguageModelToolsService = createDecorator('ILanguageModelToolsService'); @@ -208,22 +213,19 @@ export class LanguageModelToolsService extends Disposable implements ILanguageMo // TODO if a tool requiresConfirmation but is not being invoked inside a chat session, we can show some other UI, like a modal notification const participantName = request.response?.agent?.fullName ?? 
''; // This should always be set in this scenario with a new live request - const getConfirmationMessages = async (): Promise => { - if (!tool.data.requiresConfirmation) { - return undefined; - } - - return (await tool.impl!.provideToolConfirmationMessages(participantName, dto.parameters, token)) ?? { + const prepared = tool.impl.prepareToolInvocation ? + await tool.impl.prepareToolInvocation(participantName, dto.parameters, token) + : undefined; + const confirmationMessages = tool.data.requiresConfirmation ? + prepared?.confirmationMessages ?? { title: localize('toolConfirmTitle', "Use {0}?", `"${tool.data.displayName ?? tool.data.id}"`), message: localize('toolConfirmMessage', "{0} will use {1}.", participantName, `"${tool.data.displayName ?? tool.data.id}"`), - }; - }; - const [invocationMessage, confirmationMessages] = await Promise.all([ - tool.impl.provideToolInvocationMessage(dto.parameters, token), - getConfirmationMessages() - ]); + } + : undefined; + const defaultMessage = localize('toolInvocationMessage', "Using {0}", `"${tool.data.displayName ?? tool.data.id}"`); - toolInvocation = new ChatToolInvocation(invocationMessage ?? defaultMessage, confirmationMessages); + const invocationMessage = prepared?.invocationMessage ?? 
defaultMessage; + toolInvocation = new ChatToolInvocation(invocationMessage, confirmationMessages); token.onCancellationRequested(() => { toolInvocation!.confirmed.complete(false); }); diff --git a/src/vs/workbench/contrib/chat/common/tools/languageModelToolsContribution.ts b/src/vs/workbench/contrib/chat/common/tools/languageModelToolsContribution.ts index 34b8ed6ecdd..7f96023fcbf 100644 --- a/src/vs/workbench/contrib/chat/common/tools/languageModelToolsContribution.ts +++ b/src/vs/workbench/contrib/chat/common/tools/languageModelToolsContribution.ts @@ -21,6 +21,7 @@ interface IRawToolContribution { name?: string; icon?: string | { light: string; dark: string }; when?: string; + tags?: string[]; displayName?: string; userDescription?: string; modelDescription: string; @@ -110,7 +111,13 @@ const languageModelToolsExtensionPoint = extensionsRegistry.ExtensionsRegistry.r }, requiresConfirmation: { description: localize('requiresConfirmation', "Whether this tool requires user confirmation before being executed."), - type: 'boolean' + }, + tags: { + description: localize('toolTags', "A set of tags that roughly describe the tool's capabilities. 
A tool user may use these to filter the set of tools to just ones that are relevant for the task at hand."), + type: 'array', + items: { + type: 'string' + } } } } diff --git a/src/vs/workbench/contrib/chat/test/browser/languageModelToolsService.test.ts b/src/vs/workbench/contrib/chat/test/browser/languageModelToolsService.test.ts index e2beaf638f1..c8dee0123b6 100644 --- a/src/vs/workbench/contrib/chat/test/browser/languageModelToolsService.test.ts +++ b/src/vs/workbench/contrib/chat/test/browser/languageModelToolsService.test.ts @@ -49,8 +49,6 @@ suite('LanguageModelToolsService', () => { const toolImpl: IToolImpl = { invoke: async () => ({ 'text/plain': 'result' }), - provideToolInvocationMessage: async () => 'test', - provideToolConfirmationMessages: async () => ({ title: 'test', message: 'test' }), }; store.add(service.registerToolImplementation('testTool', toolImpl)); @@ -104,9 +102,7 @@ suite('LanguageModelToolsService', () => { assert.strictEqual(invocation.toolId, 'testTool'); assert.deepStrictEqual(invocation.parameters, { a: 1 }); return { 'text/plain': 'result' }; - }, - provideToolInvocationMessage: async () => 'test', - provideToolConfirmationMessages: async () => ({ title: 'test', message: 'test' }), + } }; store.add(service.registerToolImplementation('testTool', toolImpl)); diff --git a/src/vscode-dts/vscode.proposed.lmTools.d.ts b/src/vscode-dts/vscode.proposed.lmTools.d.ts index 89175362934..5f12e3df2d2 100644 --- a/src/vscode-dts/vscode.proposed.lmTools.d.ts +++ b/src/vscode-dts/vscode.proposed.lmTools.d.ts @@ -12,7 +12,7 @@ declare module 'vscode' { // API -> LM: an tool/function that is available to the language model export interface LanguageModelChatTool { - // TODO@API should use "id" here to match vscode tools, or keep name to match OpenAI? + // TODO@API should use "id" here to match vscode tools, or keep name to match OpenAI? Align everything. 
name: string; description: string; parametersSchema?: JSONSchema; } @@ -71,21 +71,32 @@ declare module 'vscode' { content2: (string | LanguageModelChatMessageToolResultPart | LanguageModelChatResponseToolCallPart)[]; } + // Tool registration/invoking between extensions + + /** + * A result returned from a tool invocation. + */ export interface LanguageModelToolResult { /** - * The result can contain arbitrary representations of the content. Use {@link LanguageModelToolInvocationOptions.requested} to request particular types. - * `text/plain` is required to be supported by all tools. Another example might be a `PromptElementJSON` from `@vscode/prompt-tsx`, using the `contentType` exported by that library. + * The result can contain arbitrary representations of the content. A tool user can set + * {@link LanguageModelToolInvocationOptions.requestedContentTypes} to request particular types, and a tool implementation should only + * compute the types that were requested. `text/plain` is required to be supported by all tools. Another example might be + * a `PromptElementJSON` from `@vscode/prompt-tsx`, using the `contentType` exported by that library. */ [contentType: string]: any; - } - // Tool registration/invoking between extensions + /** + * A string representation of the result. + */ + 'text/plain'?: string; + } export namespace lm { /** - * Register a LanguageModelTool. The tool must also be registered in the package.json `languageModelTools` contribution point. + * Register a LanguageModelTool. The tool must also be registered in the package.json `languageModelTools` contribution + * point. A registered tool is available in the {@link lm.tools} list for any extension to invoke. */ - export function registerTool(id: string, tool: LanguageModelTool): Disposable; + export function registerTool(id: string, tool: LanguageModelTool): Disposable; /** * A list of all available tools. @@ -95,26 +106,35 @@ declare module 'vscode' { /** * Invoke a tool with the given parameters. 
*/ - export function invokeTool(id: string, options: LanguageModelToolInvocationOptions, token: CancellationToken): Thenable; + export function invokeTool(id: string, options: LanguageModelToolInvocationOptions, token: CancellationToken): Thenable; } + /** + * A token that can be passed to {@link lm.invokeTool} when invoking a tool inside the context of handling a chat request. + */ export type ChatParticipantToolToken = unknown; - export interface LanguageModelToolInvocationOptions { + /** + * Options provided for tool invocation. + */ + export interface LanguageModelToolInvocationOptions { /** - * When this tool is being invoked within the context of a chat request, this token should be passed from {@link ChatRequest.toolInvocationToken}. - * In that case, a progress bar will be automatically shown for the tool invocation in the chat response view. If the tool is being invoked - * outside of a chat request, `undefined` should be passed instead. + * When this tool is being invoked within the context of a chat request, this token should be passed from + * {@link ChatRequest.toolInvocationToken}. In that case, a progress bar will be automatically shown for the tool + * invocation in the chat response view, and if the tool requires user confirmation, it will show up inline in the chat + * view. If the tool is being invoked outside of a chat request, `undefined` should be passed instead. */ toolInvocationToken: ChatParticipantToolToken | undefined; /** - * Parameters with which to invoke the tool. + * The parameters with which to invoke the tool. The parameters must match the schema defined in + * {@link LanguageModelToolDescription.parametersSchema} */ - parameters: Object; + parameters: T; /** - * A tool invoker can request that particular content types be returned from the tool. All tools are required to support `text/plain`. + * A tool user can request that particular content types be returned from the tool, depending on what the tool user + * supports. 
All tools are required to support `text/plain`. See {@link LanguageModelToolResult}. */ requestedContentTypes: string[]; @@ -137,59 +157,118 @@ declare module 'vscode' { }; } - export type JSONSchema = object; + /** + * Represents a JSON Schema. + * TODO@API - is this worth it? + */ + export type JSONSchema = Object; + /** + * A description of an available tool. + */ export interface LanguageModelToolDescription { /** * A unique identifier for the tool. */ - id: string; + readonly id: string; /** * A human-readable name for this tool that may be used to describe it in the UI. + * TODO@API keep? */ - displayName: string | undefined; + readonly displayName: string | undefined; /** * A description of this tool that may be passed to a language model. */ - modelDescription: string; + readonly description: string; /** * A JSON schema for the parameters this tool accepts. */ - parametersSchema?: JSONSchema; + readonly parametersSchema?: JSONSchema; /** - * The list of content types that the tool has declared support for. + * The list of content types that the tool has declared support for. See {@link LanguageModelToolResult}. */ - supportedContentTypes: string[]; - } - - export interface LanguageModelToolProvideConfirmationMessageOptions { - participantName: string; - parameters: any; + readonly supportedContentTypes: string[]; + + /** + * A set of tags, declared by the tool, that roughly describe the tool's capabilities. A tool user may use these to filter + * the set of tools to just ones that are relevant for the task at hand. + */ + readonly tags: string[]; } + /** + * Messages shown in the chat view when a tool needs confirmation from the user to run. These messages will be shown with + * buttons that say Continue and Cancel. + */ export interface LanguageModelToolConfirmationMessages { + /** + * The title of the confirmation message. + */ title: string; + + /** + * The body of the confirmation message. 
This should be phrased as an action of the participant that is invoking the tool + * from {@link LanguageModelToolInvocationPrepareOptions.participantName}. An example of a good message would be + * `${participantName} will run the command ${echo 'hello world'} in the terminal.` + * TODO@API keep this? + */ message: string | MarkdownString; } - export interface LanguageModelTool { - invoke(options: LanguageModelToolInvocationOptions, token: CancellationToken): ProviderResult; + /** + * Options for {@link LanguageModelTool.prepareToolInvocation}. + */ + export interface LanguageModelToolInvocationPrepareOptions { + /** + * The name of the participant invoking the tool. + * TODO@API keep this? + */ + participantName: string; /** - * This can be implemented to customize the message shown to the user when a tool requires confirmation. + * The parameters that the tool is being invoked with. */ - provideToolConfirmationMessages?(options: LanguageModelToolProvideConfirmationMessageOptions, token: CancellationToken): Thenable; - - /** - * This message will be shown with the progress notification when the tool is invoked in a chat session. - */ - provideToolInvocationMessage?(parameters: any, token: CancellationToken): Thenable; + parameters: T; } + /** + * A tool that can be invoked by a call to a {@link LanguageModelChat}. + */ + export interface LanguageModelTool { + /** + * Invoke the tool with the given parameters and return a result. + */ + invoke(options: LanguageModelToolInvocationOptions, token: CancellationToken): ProviderResult; + + /** + * Called once before a tool is invoked. May be implemented to customize the progress message that appears while the tool + * is running, and the messages that appear when the tool needs confirmation. + */ + prepareToolInvocation?(options: LanguageModelToolInvocationPrepareOptions, token: CancellationToken): ProviderResult; + } + + /** + * The result of a call to {@link LanguageModelTool.prepareToolInvocation}. 
+ */ + export interface PreparedToolInvocation { + /** + * A customized progress message to show while the tool runs. + */ + invocationMessage?: string; + + /** + * Customized messages to show when asking for user confirmation to run the tool. + */ + confirmationMessages?: LanguageModelToolConfirmationMessages; + } + + /** + * A reference to a tool attached to a user's request. + */ export interface ChatLanguageModelToolReference { /** * The tool's ID. Refers to a tool listed in {@link lm.tools}. @@ -197,10 +276,11 @@ declare module 'vscode' { readonly id: string; /** - * The start and end index of the reference in the {@link ChatRequest.prompt prompt}. When undefined, the reference was not part of the prompt text. + * The start and end index of the reference in the {@link ChatRequest.prompt prompt}. When undefined, the reference was + * not part of the prompt text. * - * *Note* that the indices take the leading `#`-character into account which means they can - * used to modify the prompt as-is. + * *Note* that the indices take the leading `#`-character into account which means they can be used to modify the prompt + * as-is. */ readonly range?: [start: number, end: number]; } @@ -209,12 +289,11 @@ declare module 'vscode' { /** * The list of tools that the user attached to their request. * - * *Note* that if tools are referenced in the text of the prompt, using `#`, the prompt contains - * references as authored and that it is up to the participant - * to further modify the prompt, for instance by inlining reference values or creating links to - * headings which contain the resolved values. References are sorted in reverse by their range - * in the prompt. That means the last reference in the prompt is the first in this list. This simplifies - * string-manipulation of the prompt. 
+ * *Note* that if tools are referenced in the text of the prompt, using `#`, the prompt contains references as authored + * and it is up to the participant to further modify the prompt, for instance by inlining reference values or + * creating links to headings which contain the resolved values. References are sorted in reverse by their range in the + * prompt. That means the last reference in the prompt is the first in this list. This simplifies string-manipulation of + * the prompt. */ readonly toolReferences: readonly ChatLanguageModelToolReference[];