diff --git a/.changeset/cool-cloths-fry.md b/.changeset/cool-cloths-fry.md
new file mode 100644
index 00000000..140e5fea
--- /dev/null
+++ b/.changeset/cool-cloths-fry.md
@@ -0,0 +1,6 @@
+---
+'@openai/agents-openai': patch
+'@openai/agents-core': patch
+---
+
+fix: support snake_case usage fields from OpenAI responses
diff --git a/packages/agents-core/src/usage.ts b/packages/agents-core/src/usage.ts
index 82bd9c72..e9894d2c 100644
--- a/packages/agents-core/src/usage.ts
+++ b/packages/agents-core/src/usage.ts
@@ -1,5 +1,15 @@
 import { UsageData } from './types/protocol';
 
+type UsageInput = Partial<
+  UsageData & {
+    input_tokens: number;
+    output_tokens: number;
+    total_tokens: number;
+    input_tokens_details: object;
+    output_tokens_details: object;
+  }
+> & { requests?: number };
+
 /**
  * Tracks token usage and request counts for an agent run.
  */
@@ -34,7 +44,7 @@
    */
   public outputTokensDetails: Array<Record<string, number>> = [];
 
-  constructor(input?: Partial<UsageData> & { requests?: number }) {
+  constructor(input?: UsageInput) {
     if (typeof input === 'undefined') {
       this.requests = 0;
       this.inputTokens = 0;
@@ -44,14 +54,18 @@
       this.outputTokensDetails = [];
     } else {
       this.requests = input?.requests ?? 1;
-      this.inputTokens = input?.inputTokens ?? 0;
-      this.outputTokens = input?.outputTokens ?? 0;
-      this.totalTokens = input?.totalTokens ?? 0;
-      this.inputTokensDetails = input?.inputTokensDetails
-        ? [input.inputTokensDetails]
+      this.inputTokens = input?.inputTokens ?? input?.input_tokens ?? 0;
+      this.outputTokens = input?.outputTokens ?? input?.output_tokens ?? 0;
+      this.totalTokens = input?.totalTokens ?? input?.total_tokens ?? 0;
+      const inputTokensDetails =
+        input?.inputTokensDetails ?? input?.input_tokens_details;
+      this.inputTokensDetails = inputTokensDetails
+        ? [inputTokensDetails as Record<string, number>]
         : [];
-      this.outputTokensDetails = input?.outputTokensDetails
-        ? [input.outputTokensDetails]
+      const outputTokensDetails =
+        input?.outputTokensDetails ?? input?.output_tokens_details;
+      this.outputTokensDetails = outputTokensDetails
+        ? [outputTokensDetails as Record<string, number>]
         : [];
     }
   }
diff --git a/packages/agents-core/test/usage.test.ts b/packages/agents-core/test/usage.test.ts
index 1a3d864a..dc215a93 100644
--- a/packages/agents-core/test/usage.test.ts
+++ b/packages/agents-core/test/usage.test.ts
@@ -26,6 +26,24 @@ describe('Usage', () => {
     expect(usage.totalTokens).toBe(15);
   });
 
+  it('falls back to snake_case fields', () => {
+    const usage = new Usage({
+      requests: 2,
+      input_tokens: 7,
+      output_tokens: 3,
+      total_tokens: 10,
+      input_tokens_details: { foo: 1 },
+      output_tokens_details: { bar: 2 },
+    });
+
+    expect(usage.requests).toBe(2);
+    expect(usage.inputTokens).toBe(7);
+    expect(usage.outputTokens).toBe(3);
+    expect(usage.totalTokens).toBe(10);
+    expect(usage.inputTokensDetails).toEqual([{ foo: 1 }]);
+    expect(usage.outputTokensDetails).toEqual([{ bar: 2 }]);
+  });
+
   it('adds other Usage instances correctly', () => {
     const usageA = new Usage({
       inputTokens: 1,
diff --git a/packages/agents-openai/test/openaiChatCompletionsModel.test.ts b/packages/agents-openai/test/openaiChatCompletionsModel.test.ts
index 18ed14de..eade88cc 100644
--- a/packages/agents-openai/test/openaiChatCompletionsModel.test.ts
+++ b/packages/agents-openai/test/openaiChatCompletionsModel.test.ts
@@ -79,6 +79,32 @@ describe('OpenAIChatCompletionsModel', () => {
     ]);
   });
 
+  it('parses usage tokens from snake_case fields', async () => {
+    const client = new FakeClient();
+    const response = {
+      id: 'r',
+      choices: [{ message: { content: 'hi' } }],
+      usage: { prompt_tokens: 11, completion_tokens: 7, total_tokens: 18 },
+    } as any;
+    client.chat.completions.create.mockResolvedValue(response);
+
+    const model = new OpenAIChatCompletionsModel(client as any, 'gpt');
+    const req: any = {
+      input: 'u',
+      modelSettings: {},
+      tools: [],
+      outputType: 'text',
+      handoffs: [],
+      tracing: false,
+    };
+
+    const result = await withTrace('t', () => model.getResponse(req));
+
+    expect(result.usage.inputTokens).toBe(11);
+    expect(result.usage.outputTokens).toBe(7);
+    expect(result.usage.totalTokens).toBe(18);
+  });
+
   it('outputs message when content is empty string', async () => {
     const client = new FakeClient();
     const response = {