tools API tweaks (#232023)
* update ctor order of `LanguageModelToolCallPart`

fixes #231925

* 💄 make `LanguageModelToolResultPart` follow after `LanguageModelToolCallPart`

* use `never` for `ChatParticipantToolToken` so that nothing can be assigned to it

#231937

* polish doc

* keep API runtime stable

#231938

* More updates

* Fix build

* Add validation for toolInvocationToken

---------

Co-authored-by: Rob Lourens <[email protected]>
jrieken and roblourens authored Oct 23, 2024
1 parent f67ffbd commit 86a4d1d
Showing 7 changed files with 93 additions and 50 deletions.
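For extension authors, the visible surface of these tweaks is the reordered `LanguageModelToolCallPart` constructor and the part-based `LanguageModelChatMessage.content`. A minimal sketch of the updated usage, with an invented tool name, call ID and message text:

```ts
import * as vscode from 'vscode';

// New argument order: callId first, then the tool name, then the parameters
// (previously: name, callId, parameters). Identifiers below are made up.
const toolCall = new vscode.LanguageModelToolCallPart('call_123', 'searchWorkspace', { query: 'TODO' });
const assistant = new vscode.LanguageModelChatMessage(vscode.LanguageModelChatMessageRole.Assistant, [toolCall]);

// `content` is now typed as an array of parts. Already-compiled extensions that
// still assign a plain string keep working at runtime because the setter wraps
// it into a LanguageModelTextPart; new TypeScript code should pass parts.
const user = new vscode.LanguageModelChatMessage(
	vscode.LanguageModelChatMessageRole.User,
	[new vscode.LanguageModelTextPart('Find the TODOs in this workspace')]
);
```
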
15 changes: 7 additions & 8 deletions src/vs/workbench/api/common/extHostLanguageModelTools.ts
@@ -11,7 +11,7 @@ import { IDisposable, toDisposable } from '../../../base/common/lifecycle.js';
import { revive } from '../../../base/common/marshalling.js';
import { generateUuid } from '../../../base/common/uuid.js';
import { IExtensionDescription } from '../../../platform/extensions/common/extensions.js';
import { IPreparedToolInvocation, IToolInvocation, IToolInvocationContext, IToolResult } from '../../contrib/chat/common/languageModelToolsService.js';
import { IPreparedToolInvocation, isToolInvocationContext, IToolInvocation, IToolInvocationContext, IToolResult } from '../../contrib/chat/common/languageModelToolsService.js';
import { ExtHostLanguageModelToolsShape, IMainContext, IToolDataDto, MainContext, MainThreadLanguageModelToolsShape } from './extHost.protocol.js';
import * as typeConvert from './extHostTypeConverters.js';

@@ -48,6 +48,11 @@ export class ExtHostLanguageModelTools implements ExtHostLanguageModelToolsShape
if (options.tokenizationOptions) {
this._tokenCountFuncs.set(callId, options.tokenizationOptions.countTokens);
}

if (options.toolInvocationToken && !isToolInvocationContext(options.toolInvocationToken)) {
throw new Error(`Invalid tool invocation token`);
}

try {
// Making the round trip here because not all tools were necessarily registered in this EH
const result = await this._proxy.$invokeTool({
@@ -81,7 +86,7 @@ export class ExtHostLanguageModelTools implements ExtHostLanguageModelToolsShape
throw new Error(`Unknown tool ${dto.toolId}`);
}

const options: vscode.LanguageModelToolInvocationOptions<Object> = { parameters: dto.parameters, toolInvocationToken: dto.context };
const options: vscode.LanguageModelToolInvocationOptions<Object> = { parameters: dto.parameters, toolInvocationToken: dto.context as vscode.ChatParticipantToolToken | undefined };
if (dto.tokenBudget !== undefined) {
options.tokenizationOptions = {
tokenBudget: dto.tokenBudget,
@@ -90,12 +95,6 @@
};
}

// Some participant in extHostChatAgents calls invokeTool, goes to extHostLMTools
// mainThreadLMTools invokes the tool, which calls back to extHostLMTools
// The tool requests permission
// The tool in extHostLMTools calls for permission back to mainThreadLMTools
// And back to extHostLMTools, and back to the participant in extHostChatAgents
// Is there a tool call ID to identify the call?
const extensionResult = await raceCancellation(Promise.resolve(item.tool.invoke(options, token)), token);
if (!extensionResult) {
throw new CancellationError();
2 changes: 1 addition & 1 deletion src/vs/workbench/api/common/extHostLanguageModels.ts
@@ -105,7 +105,7 @@ class LanguageModelResponse {
if (fragment.part.type === 'text') {
out = new extHostTypes.LanguageModelTextPart(fragment.part.value);
} else {
out = new extHostTypes.LanguageModelToolCallPart(fragment.part.name, fragment.part.toolCallId, fragment.part.parameters);
out = new extHostTypes.LanguageModelToolCallPart(fragment.part.toolCallId, fragment.part.name, fragment.part.parameters);
}
res.stream.emitOne(out);
}
4 changes: 2 additions & 2 deletions src/vs/workbench/api/common/extHostTypeConverters.ts
@@ -2361,7 +2361,7 @@ export namespace LanguageModelChatMessage {
});
return new types.LanguageModelToolResultPart(c.toolCallId, content, c.isError);
} else {
return new types.LanguageModelToolCallPart(c.name, c.toolCallId, c.parameters);
return new types.LanguageModelToolCallPart(c.toolCallId, c.name, c.parameters);
}
});
const role = LanguageModelChatMessageRole.to(message.role);
@@ -2788,7 +2788,7 @@ export namespace ChatAgentRequest {
acceptedConfirmationData: request.acceptedConfirmationData,
rejectedConfirmationData: request.rejectedConfirmationData,
location2,
toolInvocationToken: Object.freeze({ sessionId: request.sessionId }),
toolInvocationToken: Object.freeze({ sessionId: request.sessionId }) as never,
model
};
}
33 changes: 27 additions & 6 deletions src/vs/workbench/api/common/extHostTypes.ts
@@ -4608,7 +4608,21 @@ export class LanguageModelChatMessage implements vscode.LanguageModelChatMessage

role: vscode.LanguageModelChatMessageRole;

content: (LanguageModelTextPart | LanguageModelToolResultPart | LanguageModelToolCallPart)[];
private _content: (LanguageModelTextPart | LanguageModelToolResultPart | LanguageModelToolCallPart)[] = [];

set content(value: string | (LanguageModelTextPart | LanguageModelToolResultPart | LanguageModelToolCallPart)[]) {
if (typeof value === 'string') {
// we changed this and still support setting content with a string property. This keeps the API runtime stable
// despite the breaking change in the type definition.
this._content = [new LanguageModelTextPart(value)];
} else {
this._content = value;
}
}

get content(): (LanguageModelTextPart | LanguageModelToolResultPart | LanguageModelToolCallPart)[] {
return this._content;
}

// Temp to avoid breaking changes
set content2(value: (string | LanguageModelToolResultPart | LanguageModelToolCallPart)[] | undefined) {
@@ -4635,19 +4649,26 @@

constructor(role: vscode.LanguageModelChatMessageRole, content: string | (LanguageModelTextPart | LanguageModelToolResultPart | LanguageModelToolCallPart)[], name?: string) {
this.role = role;
this.content = typeof content === 'string' ? [new LanguageModelTextPart(content)] : content;
this.content = content;
this.name = name;
}
}

export class LanguageModelToolCallPart implements vscode.LanguageModelToolCallPart {
name: string;
callId: string;
name: string;
parameters: any;

constructor(name: string, toolCallId: string, parameters: any) {
this.name = name;
this.callId = toolCallId;
constructor(callId: string, name: string, parameters: any) {
// TODO TEMP- swapped the order of these two arguments, trying to preserve the behavior for a build or two
if (name.startsWith('call_')) {
this.name = callId;
this.callId = name;
} else {
this.callId = callId;
this.name = name;
}

this.parameters = parameters;
}
}
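
The `call_` check above leans on the convention that model-issued tool call IDs carry that prefix. Assuming that convention holds, both argument orders produce the same object during the transition period, roughly like this (identifiers invented, import path shown only for illustration):

```ts
import { LanguageModelToolCallPart } from './extHostTypes.js';

// New order: (callId, name, parameters).
const updated = new LanguageModelToolCallPart('call_abc123', 'myTool', { query: 'x' });
// Old order: (name, callId, parameters) — detected via the 'call_' prefix and swapped.
const legacy = new LanguageModelToolCallPart('myTool', 'call_abc123', { query: 'x' });

console.log(updated.callId, updated.name); // call_abc123 myTool
console.log(legacy.callId, legacy.name);   // call_abc123 myTool
```
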
10 changes: 9 additions & 1 deletion src/vs/workbench/api/test/browser/extHostTypes.test.ts
@@ -778,7 +778,7 @@ suite('ExtHostTypes', function () {

test('No longer possible to set content on LanguageModelChatMessage', function () {
const m = types.LanguageModelChatMessage.Assistant('');
m.content = [new types.LanguageModelToolCallPart('toolCall.tool.name', 'toolCall.call.callId', 'toolCall.call.parameters')];
m.content = [new types.LanguageModelToolCallPart('toolCall.call.callId', 'toolCall.tool.name', 'toolCall.call.parameters')];

assert.equal(m.content.length, 1);
assert.equal(m.content2?.length, 1);
@@ -791,4 +791,12 @@
assert.equal(m.content2?.length, 1);
assert.ok(typeof m.content2[0] === 'string');
});

test('runtime stable, type-def changed', function () {
// see https://github.com/microsoft/vscode/issues/231938
const m = new types.LanguageModelChatMessage(types.LanguageModelChatMessageRole.User, []);
assert.deepStrictEqual(m.content, []);
m.content = 'Hello';
assert.deepStrictEqual(m.content, [new types.LanguageModelTextPart('Hello')]);
});
});
4 changes: 4 additions & 0 deletions src/vs/workbench/contrib/chat/common/languageModelToolsService.ts
@@ -38,6 +38,10 @@ export interface IToolInvocationContext {
sessionId: string;
}

export function isToolInvocationContext(obj: any): obj is IToolInvocationContext {
return typeof obj === 'object' && typeof obj.sessionId === 'string';
}

export interface IToolResult {
content: (IToolResultPromptTsxPart | IToolResultTextPart)[];
}
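
The new `isToolInvocationContext` guard backs the validation added in `extHostLanguageModelTools.ts`: anything not shaped like `{ sessionId: string }` is rejected before the request reaches the main thread. A rough sketch of what passes and what does not (session id invented, import path shown only for illustration):

```ts
import { isToolInvocationContext } from './languageModelToolsService.js';

isToolInvocationContext({ sessionId: 'chat-session-1' }); // true
isToolInvocationContext({ sessionId: 42 });               // false
isToolInvocationContext('not-a-token');                   // false → invokeTool throws 'Invalid tool invocation token'
```
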
75 changes: 43 additions & 32 deletions src/vscode-dts/vscode.d.ts
@@ -19801,14 +19801,14 @@
*/
export class LanguageModelToolCallPart {
/**
* The name of the tool to call.
* The ID of the tool call. This is a unique identifier for the tool call within the chat request.
*/
name: string;
callId: string;

/**
* The ID of the tool call. This is a unique identifier for the tool call within the chat request.
* The name of the tool to call.
*/
callId: string;
name: string;

/**
* The parameters with which to call the tool.
@@ -19817,8 +19817,36 @@

/**
* Create a new LanguageModelToolCallPart.
*
* @param callId The ID of the tool call.
* @param name The name of the tool to call.
* @param parameters The parameters with which to call the tool.
*/
constructor(callId: string, name: string, parameters: object);
}

/**
* The result of a tool call. This is the counterpart of a {@link LanguageModelToolCallPart tool call} and
* it can only be included in the content of a User message.
*/
export class LanguageModelToolResultPart {
/**
* The ID of the tool call.
*
* *Note* that this should match the {@link LanguageModelToolCallPart.callId callId} of a tool call part.
*/
callId: string;

/**
* The value of the tool result.
*/
content: (LanguageModelTextPart | LanguageModelPromptTsxPart | unknown)[];

/**
* @param callId The ID of the tool call.
* @param content The content of the tool result.
*/
constructor(name: string, callId: string, parameters: object);
constructor(callId: string, content: (LanguageModelTextPart | LanguageModelPromptTsxPart | unknown)[]);
}
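
Taken together with `LanguageModelToolCallPart`, the intended pairing is by `callId`: the tool call sits in an Assistant message and the matching result in a User message. A small sketch under that reading (tool name, call ID and values invented):

```ts
import * as vscode from 'vscode';

const call = new vscode.LanguageModelToolCallPart('call_42', 'fetchWeather', { city: 'Zurich' });
const result = new vscode.LanguageModelToolResultPart('call_42', [
	new vscode.LanguageModelTextPart('Sunny, 21°C'),
]);

const messages = [
	new vscode.LanguageModelChatMessage(vscode.LanguageModelChatMessageRole.Assistant, [call]),
	new vscode.LanguageModelChatMessage(vscode.LanguageModelChatMessageRole.User, [result]),
];
```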

/**
@@ -19854,27 +19882,6 @@ declare module 'vscode' {
constructor(value: unknown);
}

/**
* The result of a tool call. Can only be included in the content of a User message.
*/
export class LanguageModelToolResultPart {
/**
* The ID of the tool call.
*/
callId: string;

/**
* The value of the tool result.
*/
content: (LanguageModelTextPart | LanguageModelPromptTsxPart | unknown)[];

/**
* @param callId The ID of the tool call.
* @param content The content of the tool result.
*/
constructor(callId: string, content: (LanguageModelTextPart | LanguageModelPromptTsxPart | unknown)[]);
}

/**
* A result returned from a tool invocation. If using `@vscode/prompt-tsx`, this result may be rendered using a `ToolResult`.
*/
@@ -19896,19 +19903,23 @@
/**
* A token that can be passed to {@link lm.invokeTool} when invoking a tool inside the context of handling a chat request.
*/
export type ChatParticipantToolToken = unknown;
export type ChatParticipantToolToken = never;

/**
* Options provided for tool invocation.
*/
export interface LanguageModelToolInvocationOptions<T> {
/**
* When this tool is being invoked by a {@link ChatParticipant} within the context of a chat request, this token should be
* passed from {@link ChatRequest.toolInvocationToken}. In that case, a progress bar will be automatically shown for the
* tool invocation in the chat response view, and if the tool requires user confirmation, it will show up inline in the
* chat view. If the tool is being invoked outside of a chat request, `undefined` should be passed instead.
* An opaque object that ties a tool invocation to a chat request from a {@link ChatParticipant chat participant}.
*
* The _only_ way to get a valid tool invocation token is using the provided {@link ChatRequest.toolInvocationToken toolInvocationToken}
* from a chat request. In that case, a progress bar will be automatically shown for the tool invocation in the chat response view, and if
* the tool requires user confirmation, it will show up inline in the chat view.
*
* If the tool is being invoked outside of a chat request, `undefined` should be passed instead, and no special UI except for
* confirmations will be shown.
*
* If a tool invokes another tool during its invocation, it can pass along the `toolInvocationToken` that it received.
* *Note* that a tool which invokes another tool during its invocation can pass along the `toolInvocationToken` that it received.
*/
toolInvocationToken: ChatParticipantToolToken | undefined;

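Read as a whole, the contract for `toolInvocationToken` looks roughly like the following sketch: forward `request.toolInvocationToken` when running inside a chat request, pass `undefined` otherwise (participant ID, tool name and parameters are invented):

```ts
import * as vscode from 'vscode';

export function activateToolCaller(context: vscode.ExtensionContext) {
	const participant = vscode.chat.createChatParticipant('sample.participant', async (request, _chatContext, _stream, token) => {
		// Inside a chat request: forward the token so progress and confirmation
		// UI show up in the chat response view.
		await vscode.lm.invokeTool('fetchWeather', {
			parameters: { city: 'Zurich' },
			toolInvocationToken: request.toolInvocationToken,
		}, token);
	});
	context.subscriptions.push(participant);
}

// Outside of a chat request: no token exists, so pass `undefined`.
async function standaloneInvocation(token: vscode.CancellationToken) {
	await vscode.lm.invokeTool('fetchWeather', {
		parameters: { city: 'Zurich' },
		toolInvocationToken: undefined,
	}, token);
}
```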
