Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions src/extension/conversation/vscode-node/resolveModelId.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

const autoModelId = 'copilot/auto';

/**
 * Substitutes the opaque "copilot/auto" identifier with the concrete model
 * that actually served the request, so downstream telemetry reflects the
 * resolved model. Any other modelId is passed through unchanged, and when
 * no resolved model is available (undefined or empty) the original
 * "copilot/auto" identifier is kept.
 */
export function resolveModelIdForTelemetry(modelId: string, resolvedModel: string | undefined): string {
	if (modelId !== autoModelId) {
		return modelId;
	}
	// Empty string counts as "not resolved" — fall back to the auto id.
	return resolvedModel ? resolvedModel : autoModelId;
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { describe, expect, test } from 'vitest';
import { resolveModelIdForTelemetry } from '../resolveModelId';

/**
 * Unit tests for the resolveModelIdForTelemetry helper used by the
 * copy/insert/apply telemetry events to substitute 'copilot/auto' with the
 * actual resolved model.
 *
 * Integration tests verifying that resolvedModel is propagated through
 * DefaultIntentRequestHandler into result metadata live in
 * defaultIntentRequestHandler.spec.ts.
 */
describe('resolveModelIdForTelemetry', () => {
	test('returns resolvedModel when modelId is copilot/auto', () => {
		const result = resolveModelIdForTelemetry('copilot/auto', 'gpt-4o');
		expect(result).toBe('gpt-4o');
	});

	test('falls back to copilot/auto when resolvedModel is undefined', () => {
		const result = resolveModelIdForTelemetry('copilot/auto', undefined);
		expect(result).toBe('copilot/auto');
	});

	test('falls back to copilot/auto when resolvedModel is empty string', () => {
		const result = resolveModelIdForTelemetry('copilot/auto', '');
		expect(result).toBe('copilot/auto');
	});

	test('returns original modelId when not copilot/auto', () => {
		const result = resolveModelIdForTelemetry('gpt-4o', 'gpt-4o-2024-05-13');
		expect(result).toBe('gpt-4o');
	});

	test('returns original modelId when not copilot/auto and no resolvedModel', () => {
		const result = resolveModelIdForTelemetry('claude-sonnet-4', undefined);
		expect(result).toBe('claude-sonnet-4');
	});

	test('does not substitute for empty modelId', () => {
		const result = resolveModelIdForTelemetry('', 'gpt-4o');
		expect(result).toBe('');
	});
});
5 changes: 3 additions & 2 deletions src/extension/conversation/vscode-node/userActions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import { EditCodeStepTurnMetaData } from '../../intents/node/editCodeStep';
import { Conversation, ICopilotChatResultIn } from '../../prompt/common/conversation';
import { IFeedbackReporter } from '../../prompt/node/feedbackReporter';
import { sendUserActionTelemetry } from '../../prompt/node/telemetry';
import { resolveModelIdForTelemetry } from './resolveModelId';

export const IUserFeedbackService = createServiceIdentifier<IUserFeedbackService>('IUserFeedbackService');
export interface IUserFeedbackService {
Expand Down Expand Up @@ -277,7 +278,7 @@ export class UserFeedbackService implements IUserFeedbackService {
headerRequestId: result.metadata?.responseId ?? '',
participant: agentId,
languageId: e.action.languageId ?? '',
modelId: e.action.modelId ?? '',
modelId: resolveModelIdForTelemetry(e.action.modelId ?? '', result.metadata?.resolvedModel),
comp_type: compType,
mode: participantIdToModeName(agentId),
},
Expand All @@ -302,7 +303,7 @@ export class UserFeedbackService implements IUserFeedbackService {
headerRequestId: result.metadata?.responseId ?? '',
participant: agentId,
languageId: e.languageId ?? '',
modelId: e.modelId,
modelId: resolveModelIdForTelemetry(e.modelId, result.metadata?.resolvedModel),
mode: participantIdToModeName(agentId),
},
{
Expand Down
2 changes: 2 additions & 0 deletions src/extension/prompt/common/conversation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -363,6 +363,8 @@ export interface IResultMetadata {
toolCallResults?: Record<string, LanguageModelToolResult>;
maxToolCallsExceeded?: boolean;
summary?: { toolCallRoundId: string; text: string };
/** The actual model used to generate the response, which may differ from the requested model (e.g., when 'auto' resolves to a specific model) */
resolvedModel?: string;
/** Prompt tokens from the language model (e.g., Anthropic Messages API) */
promptTokens?: number;
/** Output tokens from the language model (e.g., Anthropic Messages API) */
Expand Down
1 change: 1 addition & 0 deletions src/extension/prompt/node/defaultIntentRequestHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,7 @@ export class DefaultIntentRequestHandler {
const metadataFragment: Partial<IResultMetadata> = {
toolCallRounds: resultDetails.toolCallRounds,
toolCallResults: this._collectRelevantToolCallResults(resultDetails.toolCallRounds, resultDetails.toolCallResults),
resolvedModel: resultDetails.response.type === ChatFetchResponseType.Success ? resultDetails.response.resolvedModel : undefined,
};
Comment on lines 154 to 158
Copy link

Copilot AI Mar 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There’s no unit test asserting that resolvedModel is actually propagated into chatResult.metadata from a successful ChatResponse. Since defaultIntentRequestHandler.spec.ts already covers getResult(), it would be better to add an assertion there (e.g., set the test IChatMLFetcher to return a non-empty resolvedModel and verify it appears in the returned metadata).

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Fixed

mixin(chatResult, { metadata: metadataFragment }, true);
const baseModelTelemetry = createTelemetryWithId();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
exports[`defaultIntentRequestHandler > ChatResult metadata after multiple turns only has tool results from current turn 1`] = `
{
"codeBlocks": [],
"resolvedModel": "",
"toolCallResults": {
"tool_call_id_0__vscode-0": LanguageModelToolResult {
"content": [
Expand Down Expand Up @@ -78,6 +79,7 @@ exports[`defaultIntentRequestHandler > ChatResult metadata after multiple turns
exports[`defaultIntentRequestHandler > ChatResult metadata after multiple turns only has tool results from current turn 2`] = `
{
"codeBlocks": [],
"resolvedModel": "",
"toolCallResults": {
"tool_call_id_2__vscode-2": LanguageModelToolResult {
"content": [
Expand Down Expand Up @@ -182,6 +184,7 @@ exports[`defaultIntentRequestHandler > confirms on max tool call iterations, and
"metadata": {
"codeBlocks": [],
"maxToolCallsExceeded": true,
"resolvedModel": "",
"toolCallResults": {
"tool_call_id_0__vscode-0": LanguageModelToolResult {
"content": [
Expand Down Expand Up @@ -299,6 +302,7 @@ exports[`defaultIntentRequestHandler > confirms on max tool call iterations, and
"metadata": {
"codeBlocks": [],
"maxToolCallsExceeded": true,
"resolvedModel": "",
"toolCallResults": {
"tool_call_id_4__vscode-4": LanguageModelToolResult {
"content": [
Expand Down Expand Up @@ -2518,6 +2522,7 @@ exports[`defaultIntentRequestHandler > makes a successful request with a single
{
"metadata": {
"codeBlocks": [],
"resolvedModel": "",
"toolCallResults": undefined,
"toolCallRounds": [
ToolCallRound {
Expand Down Expand Up @@ -2840,6 +2845,7 @@ exports[`defaultIntentRequestHandler > makes a tool call turn 1`] = `
{
"metadata": {
"codeBlocks": [],
"resolvedModel": "",
"toolCallResults": {
"tool_call_id__vscode-0": LanguageModelToolResult {
"content": [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ suite('defaultIntentRequestHandler', () => {
let chatResponse: (string | IResponseDelta[])[] = [];
let promptResult: RenderPromptResult | RenderPromptResult[];
let telemetry: SpyingTelemetryService;
let fetcher: StaticChatMLFetcher;
let endpoint: IChatEndpoint;
let turnIdCounter = 0;
let builtPrompts: IBuildPromptContext[] = [];
Expand All @@ -52,8 +53,9 @@ suite('defaultIntentRequestHandler', () => {
const services = createExtensionUnitTestingServices();
telemetry = new SpyingTelemetryService();
chatResponse = [];
fetcher = new StaticChatMLFetcher(chatResponse);
services.define(ITelemetryService, telemetry);
services.define(IChatMLFetcher, new StaticChatMLFetcher(chatResponse));
services.define(IChatMLFetcher, fetcher);
services.define(IWorkspaceFileIndex, new SyncDescriptor(NullWorkspaceFileIndex));

accessor = services.createTestingAccessor();
Expand Down Expand Up @@ -189,6 +191,19 @@ suite('defaultIntentRequestHandler', () => {
expect(getDerandomizedTelemetry()).toMatchSnapshot();
});

test('propagates resolvedModel into result metadata from a successful response', async () => {
fetcher.resolvedModel = 'gpt-4o-resolved';
const handler = makeHandler();
chatResponse[0] = 'some response here :)';
promptResult = {
...nullRenderPromptResult(),
messages: [{ role: Raw.ChatRole.User, content: [toTextPart('hello world!')] }],
};

const result = await handler.getResult();
expect(result.metadata?.resolvedModel).toBe('gpt-4o-resolved');
});

test('makes a tool call turn', async () => {
const handler = makeHandler();
chatResponse[0] = [{
Expand Down
3 changes: 2 additions & 1 deletion src/platform/chat/test/common/staticChatMLFetcher.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ export class StaticChatMLFetcher implements IChatMLFetcher {
_serviceBrand: undefined;
onDidMakeChatMLRequest = Event.None;
private reqs = 0;
public resolvedModel = '';

constructor(public readonly value: StaticChatMLFetcherInput) { }

Expand Down Expand Up @@ -43,7 +44,7 @@ export class StaticChatMLFetcher implements IChatMLFetcher {
responseSoFar += chunks[i].text;
}

return { type: ChatFetchResponseType.Success, requestId: '', serverRequestId: '', usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0, prompt_tokens_details: { cached_tokens: 0 } }, value: responseSoFar, resolvedModel: '' };
return { type: ChatFetchResponseType.Success, requestId: '', serverRequestId: '', usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0, prompt_tokens_details: { cached_tokens: 0 } }, value: responseSoFar, resolvedModel: this.resolvedModel };
}

async fetchMany(): Promise<ChatResponses> {
Expand Down