diff --git a/.changeset/light-apes-fetch.md b/.changeset/light-apes-fetch.md
new file mode 100644
index 000000000000..05e19dce2271
--- /dev/null
+++ b/.changeset/light-apes-fetch.md
@@ -0,0 +1,16 @@
+---
+'@ai-sdk/openai': patch
+'@ai-sdk/azure': patch
+---
+
+Expose annotations from the Responses API on the `text-end` part's provider metadata in `doStream`.
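+
+For example, they can be read from the full stream (a minimal sketch mirroring the updated streamText examples; the annotation shape depends on the tool that produced them):
+
+```ts
+for await (const part of result.fullStream) {
+  if (part.type === 'text-end') {
+    console.dir(part.providerMetadata?.openai?.annotations);
+  }
+}
+```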
diff --git a/examples/ai-core/src/stream-text/azure-responses-code-interpreter.ts b/examples/ai-core/src/stream-text/azure-responses-code-interpreter.ts
index 86463d4eecb2..b19f10a124d1 100644
--- a/examples/ai-core/src/stream-text/azure-responses-code-interpreter.ts
+++ b/examples/ai-core/src/stream-text/azure-responses-code-interpreter.ts
@@ -12,9 +12,9 @@ import 'dotenv/config';
async function main() {
// Basic text generation
const result = streamText({
- model: azure.responses('gpt-5-mini'), // use your own deployment
+ model: azure.responses('gpt-4.1-mini'), // use your own deployment
prompt:
- 'Create a program that generates five random numbers between 1 and 100 with two decimal places, and show me the execution results.',
+ 'Create a program that generates five random numbers between 1 and 100 with two decimal places, and show me the execution results. Also save the result to a file.',
tools: {
code_interpreter: azure.tools.codeInterpreter(),
},
@@ -27,6 +27,15 @@ async function main() {
console.log('\n=== Other Outputs ===');
console.log(await result.toolCalls);
console.log(await result.toolResults);
+ console.log('\n=== Code Interpreter Annotations ===');
+ for await (const part of result.fullStream) {
+ if (part.type === 'text-end') {
+ const annotations = part.providerMetadata?.openai?.annotations;
+ if (annotations) {
+ console.dir(annotations);
+ }
+ }
+ }
}
main().catch(console.error);
diff --git a/examples/ai-core/src/stream-text/openai-responses-code-interpreter.ts b/examples/ai-core/src/stream-text/openai-responses-code-interpreter.ts
index 9b229937f417..6b6b3dcff63d 100644
--- a/examples/ai-core/src/stream-text/openai-responses-code-interpreter.ts
+++ b/examples/ai-core/src/stream-text/openai-responses-code-interpreter.ts
@@ -7,7 +7,7 @@ async function main() {
const result = streamText({
model: openai.responses('gpt-4.1-mini'),
prompt:
- 'Create a program that generates five random numbers between 1 and 100 with two decimal places, and show me the execution results.',
+ 'Create a program that generates five random numbers between 1 and 100 with two decimal places, and show me the execution results. Also save the result to a file.',
tools: {
code_interpreter: openai.tools.codeInterpreter({}),
},
@@ -20,6 +20,15 @@ async function main() {
console.log('\n=== Other Outputs ===');
console.log(await result.toolCalls);
console.log(await result.toolResults);
+ console.log('\n=== Code Interpreter Annotations ===');
+ for await (const part of result.fullStream) {
+ if (part.type === 'text-end') {
+ const annotations = part.providerMetadata?.openai?.annotations;
+ if (annotations) {
+ console.dir(annotations);
+ }
+ }
+ }
}
main().catch(console.error);
diff --git a/examples/next-openai/app/test-openai-code-interpreter/page.tsx b/examples/next-openai/app/test-openai-code-interpreter/page.tsx
index 83ef3565ca9d..9138ed87ff12 100644
--- a/examples/next-openai/app/test-openai-code-interpreter/page.tsx
+++ b/examples/next-openai/app/test-openai-code-interpreter/page.tsx
@@ -5,6 +5,7 @@ import { DefaultChatTransport } from 'ai';
import ChatInput from '@/components/chat-input';
import { OpenAICodeInterpreterMessage } from '@/app/api/chat-openai-code-interpreter/route';
import CodeInterpreterView from '@/components/tool/openai-code-interpreter-view';
+import { OpenaiResponsesText } from '@/components/tool/openai-responses-text';
export default function TestOpenAIWebSearch() {
const { status, sendMessage, messages } =
@@ -24,7 +25,7 @@ export default function TestOpenAIWebSearch() {
{message.parts.map((part, index) => {
switch (part.type) {
case 'text':
-                  return <div key={index}>{part.text}</div>;
+                  return <OpenaiResponsesText key={index} part={part} />;
case 'tool-code_interpreter':
                  return <CodeInterpreterView key={index} invocation={part} />;
}
diff --git a/examples/next-openai/components/tool/openai-responses-text.tsx b/examples/next-openai/components/tool/openai-responses-text.tsx
new file mode 100644
index 000000000000..a4963b52a2bf
--- /dev/null
+++ b/examples/next-openai/components/tool/openai-responses-text.tsx
@@ -0,0 +1,85 @@
+'use client';
+
+import { Response } from '@/components/ai-elements/response';
+import { TextUIPart } from 'ai';
+import { z } from 'zod/v4';
+
+export const openaiResponsesTextUIPartProviderMetadataSchema = z.object({
+ openai: z.object({
+ itemId: z.string(),
+ annotations: z
+ .array(
+ z.discriminatedUnion('type', [
+ z.object({
+ type: z.literal('url_citation'),
+ url: z.string(),
+ title: z.string(),
+ start_index: z.number(),
+ end_index: z.number(),
+ }),
+ z.object({
+ type: z.literal('file_citation'),
+ file_id: z.string(),
+ filename: z.string(),
+ index: z.number(),
+ quote: z.string().nullish(),
+ }),
+ z.object({
+ type: z.literal('container_file_citation'),
+ container_id: z.string(),
+ file_id: z.string(),
+ filename: z.string(),
+ start_index: z.number(),
+ end_index: z.number(),
+ }),
+ ]),
+ )
+ .optional(),
+ }),
+});
+
+export function OpenaiResponsesText({ part }: { part: TextUIPart }) {
+  if (!part.providerMetadata) return <Response>{part.text}</Response>;
+
+ const providerMetadataParsed =
+ openaiResponsesTextUIPartProviderMetadataSchema.safeParse(
+ part.providerMetadata,
+ );
+
+  if (!providerMetadataParsed.success) return <Response>{part.text}</Response>;
+
+ const { annotations } = providerMetadataParsed.data.openai;
+  if (!annotations) return <Response>{part.text}</Response>;
+
+ const baseUrl = typeof window !== 'undefined' ? window.location.origin : '';
+
+ // Sort annotations by start_index in descending order to process from end to start.
+ // This ensures that string modifications don't invalidate indices of earlier annotations.
+ const sortedAnnotations = [...annotations].sort((a, b) => {
+ const aStart = 'start_index' in a ? a.start_index : -1;
+ const bStart = 'start_index' in b ? b.start_index : -1;
+ return bStart - aStart;
+ });
+
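+  // Rewrite container_file_citation spans into links to a local download route
+  // (the /api/download-container-file route referenced below is assumed to proxy
+  // the container file contents); other annotation types are left untouched.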
+ const text = sortedAnnotations.reduce((acc, cur) => {
+ const text = (() => {
+ switch (cur.type) {
+ case 'container_file_citation':
+ if (cur.start_index === 0 && cur.end_index === 0) return acc;
+ return (
+ acc.slice(0, cur.start_index) +
+ `${baseUrl}/api/download-container-file?container_id=${encodeURIComponent(cur.container_id)}&file_id=${encodeURIComponent(cur.file_id)}&filename=${encodeURIComponent(cur.filename)}` +
+ acc.slice(cur.end_index)
+ );
+ default:
+ return acc;
+ }
+ })();
+ return text;
+ }, part.text);
+
+  return <Response>{text}</Response>;
+}
diff --git a/packages/azure/src/__snapshots__/azure-openai-provider.test.ts.snap b/packages/azure/src/__snapshots__/azure-openai-provider.test.ts.snap
index c23aa6d892cc..0234d0f623d9 100644
--- a/packages/azure/src/__snapshots__/azure-openai-provider.test.ts.snap
+++ b/packages/azure/src/__snapshots__/azure-openai-provider.test.ts.snap
@@ -2059,6 +2059,11 @@ Generated numbers (formatted to 2 decimal places): 88.99, 51.21, 89.85, 88.97, 7
},
{
"id": "msg_0ad69c3c5fcb01f60068eba7956c948193be6ab573376051d6",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_0ad69c3c5fcb01f60068eba7956c948193be6ab573376051d6",
+ },
+ },
"type": "text-end",
},
{
@@ -2603,6 +2608,19 @@ providers and models, and which ones are available in the AI SDK.",
},
{
"id": "msg_06456cb9918b63780068cacd7c922081a1ae15f2672a51980f",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "file_id": "file-Ebzhf8H4DPGPr9pUhr7n7v",
+ "filename": "ai.pdf",
+ "index": 379,
+ "type": "file_citation",
+ },
+ ],
+ "itemId": "msg_06456cb9918b63780068cacd7c922081a1ae15f2672a51980f",
+ },
+ },
"type": "text-end",
},
{
@@ -3109,6 +3127,25 @@ exports[`responses > file search tool > should stream file search results withou
},
{
"id": "msg_0459517ad68504ad0068cabfc6b5c48192a15ac773668537f1",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "file_id": "file-Ebzhf8H4DPGPr9pUhr7n7v",
+ "filename": "ai.pdf",
+ "index": 154,
+ "type": "file_citation",
+ },
+ {
+ "file_id": "file-Ebzhf8H4DPGPr9pUhr7n7v",
+ "filename": "ai.pdf",
+ "index": 382,
+ "type": "file_citation",
+ },
+ ],
+ "itemId": "msg_0459517ad68504ad0068cabfc6b5c48192a15ac773668537f1",
+ },
+ },
"type": "text-end",
},
{
@@ -4976,6 +5013,20 @@ exports[`responses > web search preview tool > should stream web search preview
},
{
"id": "msg_0dcf1a118189f28100691daa5a28488194b14fd6fa1d2895be",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "end_index": 577,
+ "start_index": 458,
+ "title": "While You Were Sleeping: 5 stories you might have missed, Nov 19, 2025",
+ "type": "url_citation",
+ "url": "https://www.straitstimes.com/world/while-you-were-sleeping-5-stories-you-might-have-missed-nov-19-2025",
+ },
+ ],
+ "itemId": "msg_0dcf1a118189f28100691daa5a28488194b14fd6fa1d2895be",
+ },
+ },
"type": "text-end",
},
{
diff --git a/packages/azure/src/azure-openai-provider.test.ts b/packages/azure/src/azure-openai-provider.test.ts
index 798787150f9b..076e7c043ba1 100644
--- a/packages/azure/src/azure-openai-provider.test.ts
+++ b/packages/azure/src/azure-openai-provider.test.ts
@@ -1350,6 +1350,11 @@ describe('responses', () => {
},
{
"id": "msg_67c9a8787f4c8190b49c858d4c1cf20c",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_67c9a8787f4c8190b49c858d4c1cf20c",
+ },
+ },
"type": "text-end",
},
{
@@ -1559,6 +1564,25 @@ describe('responses', () => {
},
{
"id": "msg_456",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "file_id": "assistant-YRcoCqn3Fo2K4JgraG",
+ "filename": "resource1.json",
+ "index": 145,
+ "type": "file_citation",
+ },
+ {
+ "file_id": "assistant-YRcoCqn3Fo2K4JgraG",
+ "filename": "resource1.json",
+ "index": 192,
+ "type": "file_citation",
+ },
+ ],
+ "itemId": "msg_456",
+ },
+ },
"type": "text-end",
},
{
diff --git a/packages/openai/src/responses/__snapshots__/openai-responses-language-model.test.ts.snap b/packages/openai/src/responses/__snapshots__/openai-responses-language-model.test.ts.snap
index 6e3658c8242b..32561001ee9f 100644
--- a/packages/openai/src/responses/__snapshots__/openai-responses-language-model.test.ts.snap
+++ b/packages/openai/src/responses/__snapshots__/openai-responses-language-model.test.ts.snap
@@ -2755,6 +2755,21 @@ exports[`OpenAIResponsesLanguageModel > doStream > code interpreter tool > shoul
},
{
"id": "msg_68c2e7054ae481938354ab3e4e77abad02d3a5742c7ddae9",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "container_id": "cntr_68c2e6f380d881908a57a82d394434ff02f484f5344062e9",
+ "end_index": 465,
+ "file_id": "cfile_68c2e7084ab48191a67824aa1f4c90f1",
+ "filename": "roll2dice_sums_10000.csv",
+ "start_index": 423,
+ "type": "container_file_citation",
+ },
+ ],
+ "itemId": "msg_68c2e7054ae481938354ab3e4e77abad02d3a5742c7ddae9",
+ },
+ },
"type": "text-end",
},
{
@@ -3294,6 +3309,19 @@ providers and models, and which ones are available in the AI SDK.",
},
{
"id": "msg_06456cb9918b63780068cacd7c922081a1ae15f2672a51980f",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "file_id": "file-Ebzhf8H4DPGPr9pUhr7n7v",
+ "filename": "ai.pdf",
+ "index": 379,
+ "type": "file_citation",
+ },
+ ],
+ "itemId": "msg_06456cb9918b63780068cacd7c922081a1ae15f2672a51980f",
+ },
+ },
"type": "text-end",
},
{
@@ -3800,6 +3828,25 @@ exports[`OpenAIResponsesLanguageModel > doStream > file search tool > should str
},
{
"id": "msg_0459517ad68504ad0068cabfc6b5c48192a15ac773668537f1",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "file_id": "file-Ebzhf8H4DPGPr9pUhr7n7v",
+ "filename": "ai.pdf",
+ "index": 154,
+ "type": "file_citation",
+ },
+ {
+ "file_id": "file-Ebzhf8H4DPGPr9pUhr7n7v",
+ "filename": "ai.pdf",
+ "index": 382,
+ "type": "file_citation",
+ },
+ ],
+ "itemId": "msg_0459517ad68504ad0068cabfc6b5c48192a15ac773668537f1",
+ },
+ },
"type": "text-end",
},
{
@@ -3889,6 +3936,11 @@ exports[`OpenAIResponsesLanguageModel > doStream > image generation tool > shoul
},
{
"id": "msg_0df93c0bb83a72f20068c97a0b36f4819ea5906451007f95e2",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_0df93c0bb83a72f20068c97a0b36f4819ea5906451007f95e2",
+ },
+ },
"type": "text-end",
},
{
@@ -5897,6 +5949,11 @@ exports[`OpenAIResponsesLanguageModel > doStream > mcp tool > should stream mcp
},
{
"id": "msg_0c72b1033351981300690ccf97df988193b9c062fa247e6257",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_0c72b1033351981300690ccf97df988193b9c062fa247e6257",
+ },
+ },
"type": "text-end",
},
{
@@ -5975,6 +6032,11 @@ exports[`OpenAIResponsesLanguageModel > doStream > web search tool > should hand
},
{
"id": "msg_test",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_test",
+ },
+ },
"type": "text-end",
},
{
@@ -6415,6 +6477,55 @@ Would you like me to pull live updates or focus on a specific topic (arts,",
},
{
"id": "msg_68c187e279048192be3775da689aa25105ca09a4773fcd25",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "end_index": 414,
+ "start_index": 327,
+ "title": "What to see at Berlin Art Week 2025 | Wallpaper*",
+ "type": "url_citation",
+ "url": "https://www.wallpaper.com/art/exhibitions-shows/berlin-art-week-2025",
+ },
+ {
+ "end_index": 697,
+ "start_index": 620,
+ "title": "Berlin 2025 – the main events | visitBerlin.de",
+ "type": "url_citation",
+ "url": "https://www.visitberlin.de/en/berlin-2025-the-main-events",
+ },
+ {
+ "end_index": 975,
+ "start_index": 898,
+ "title": "Berlin 2025 – the main events | visitBerlin.de",
+ "type": "url_citation",
+ "url": "https://www.visitberlin.de/en/berlin-2025-the-main-events",
+ },
+ {
+ "end_index": 1292,
+ "start_index": 1215,
+ "title": "Berlin 2025 – the main events | visitBerlin.de",
+ "type": "url_citation",
+ "url": "https://www.visitberlin.de/en/berlin-2025-the-main-events",
+ },
+ {
+ "end_index": 1602,
+ "start_index": 1525,
+ "title": "Berlin 2025 – the main events | visitBerlin.de",
+ "type": "url_citation",
+ "url": "https://www.visitberlin.de/en/berlin-2025-the-main-events",
+ },
+ {
+ "end_index": 2029,
+ "start_index": 1900,
+ "title": "Berlin holds off decision on participation in postwar Ukraine force | Reuters",
+ "type": "url_citation",
+ "url": "https://www.reuters.com/world/europe/berlin-postpones-decision-military-engagement-regarding-ukraine-2025-09-04/",
+ },
+ ],
+ "itemId": "msg_68c187e279048192be3775da689aa25105ca09a4773fcd25",
+ },
+ },
"type": "text-end",
},
{
diff --git a/packages/openai/src/responses/openai-responses-language-model.test.ts b/packages/openai/src/responses/openai-responses-language-model.test.ts
index 52afd559e2c8..4ac755a699a5 100644
--- a/packages/openai/src/responses/openai-responses-language-model.test.ts
+++ b/packages/openai/src/responses/openai-responses-language-model.test.ts
@@ -3257,6 +3257,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_67c9a8787f4c8190b49c858d4c1cf20c",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_67c9a8787f4c8190b49c858d4c1cf20c",
+ },
+ },
"type": "text-end",
},
{
@@ -3328,6 +3333,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_67c9a8787f4c8190b49c858d4c1cf20c",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_67c9a8787f4c8190b49c858d4c1cf20c",
+ },
+ },
"type": "text-end",
},
{
@@ -3555,6 +3565,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_68b08bfc9a548196b15465b6020b04e40cd677a623b867d5",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_68b08bfc9a548196b15465b6020b04e40cd677a623b867d5",
+ },
+ },
"type": "text-end",
},
{
@@ -3631,6 +3646,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_689cec4d46448195905a27fb9e12ff670f92af1765dd5aad",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_689cec4d46448195905a27fb9e12ff670f92af1765dd5aad",
+ },
+ },
"type": "text-end",
},
{
@@ -4113,6 +4133,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ },
+ },
"type": "text-end",
},
{
@@ -4224,6 +4249,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ },
+ },
"type": "text-end",
},
{
@@ -4406,6 +4436,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ },
+ },
"type": "text-end",
},
{
@@ -4519,6 +4554,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ },
+ },
"type": "text-end",
},
{
@@ -4715,6 +4755,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_67c97c02656c81908e080dfdf4a03cd1",
+ },
+ },
"type": "text-end",
},
{
@@ -4778,6 +4823,11 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_final_78d08d03767d92908f25523f5ge51e77",
+ "providerMetadata": {
+ "openai": {
+ "itemId": "msg_final_78d08d03767d92908f25523f5ge51e77",
+ },
+ },
"type": "text-end",
},
{
@@ -5011,6 +5061,25 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_123",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "end_index": 234,
+ "start_index": 123,
+ "title": "Example URL",
+ "type": "url_citation",
+ "url": "https://example.com",
+ },
+ {
+ "file_id": "file-abc123",
+ "quote": "This is a quote from the file",
+ "type": "file_citation",
+ },
+ ],
+ "itemId": "msg_123",
+ },
+ },
"type": "text-end",
},
{
@@ -5085,6 +5154,25 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_456",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "file_id": "file-YRcoCqn3Fo2K4JgraG",
+ "filename": "resource1.json",
+ "index": 145,
+ "type": "file_citation",
+ },
+ {
+ "file_id": "file-YRcoCqn3Fo2K4JgraG",
+ "filename": "resource1.json",
+ "index": 192,
+ "type": "file_citation",
+ },
+ ],
+ "itemId": "msg_456",
+ },
+ },
"type": "text-end",
},
{
@@ -5267,6 +5355,21 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_68c2e7054ae481938354ab3e4e77abad02d3a5742c7ddae9",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "container_id": "cntr_68c2e6f380d881908a57a82d394434ff02f484f5344062e9",
+ "end_index": 465,
+ "file_id": "cfile_68c2e7084ab48191a67824aa1f4c90f1",
+ "filename": "roll2dice_sums_10000.csv",
+ "start_index": 423,
+ "type": "container_file_citation",
+ },
+ ],
+ "itemId": "msg_68c2e7054ae481938354ab3e4e77abad02d3a5742c7ddae9",
+ },
+ },
"type": "text-end",
},
{
@@ -5444,6 +5547,17 @@ describe('OpenAIResponsesLanguageModel', () => {
},
{
"id": "msg_68c2e7054ae481938354ab3e4e77abad02d3a5742c7ddae9",
+ "providerMetadata": {
+ "openai": {
+ "annotations": [
+ {
+ "file_id": "cfile_68c2e7084ab48191a67824aa1f4c90f1",
+ "type": "file_path",
+ },
+ ],
+ "itemId": "msg_68c2e7054ae481938354ab3e4e77abad02d3a5742c7ddae9",
+ },
+ },
"type": "text-end",
},
{
diff --git a/packages/openai/src/responses/openai-responses-language-model.ts b/packages/openai/src/responses/openai-responses-language-model.ts
index c629c7102b15..9149f2849eee 100644
--- a/packages/openai/src/responses/openai-responses-language-model.ts
+++ b/packages/openai/src/responses/openai-responses-language-model.ts
@@ -836,6 +836,14 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 {
| undefined
> = {};
+    // annotations collected for the current output message; attached to the 'text-end' part's providerMetadata
+ const ongoingAnnotations: Array<
+ Extract<
+ OpenAIResponsesChunk,
+ { type: 'response.output_text.annotation.added' }
+ >['annotation']
+ > = [];
+
// flag that checks if there have been client-side tool calls (not executed by openai)
let hasFunctionCall = false;
@@ -973,6 +981,8 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 {
providerExecuted: true,
});
} else if (value.item.type === 'message') {
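+            // a new message item started: drop annotations collected for the previous message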
+ ongoingAnnotations.splice(0, ongoingAnnotations.length);
controller.enqueue({
type: 'text-start',
id: value.item.id,
@@ -1003,7 +1012,10 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 {
},
});
}
- } else if (isResponseOutputItemDoneChunk(value)) {
+ } else if (
+ isResponseOutputItemDoneChunk(value) &&
+ value.item.type !== 'message'
+ ) {
if (value.item.type === 'function_call') {
ongoingToolCalls[value.output_index] = undefined;
hasFunctionCall = true;
@@ -1177,11 +1189,6 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 {
openai: { itemId: value.item.id },
},
});
- } else if (value.item.type === 'message') {
- controller.enqueue({
- type: 'text-end',
- id: value.item.id,
- });
} else if (value.item.type === 'reasoning') {
const activeReasoningPart = activeReasoning[value.item.id];
@@ -1382,6 +1389,7 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 {
serviceTier = value.response.service_tier;
}
} else if (isResponseAnnotationAddedChunk(value)) {
+ ongoingAnnotations.push(value.annotation);
if (value.annotation.type === 'url_citation') {
controller.enqueue({
type: 'source',
@@ -1452,6 +1460,22 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 {
},
});
}
+ } else if (
+ isResponseOutputItemDoneChunk(value) &&
+ value.item.type === 'message'
+ ) {
+ controller.enqueue({
+ type: 'text-end',
+ id: value.item.id,
+ providerMetadata: {
+ openai: {
+ itemId: value.item.id,
+ ...(ongoingAnnotations.length > 0 && {
+ annotations: ongoingAnnotations,
+ }),
+ },
+ },
+ });
} else if (isErrorChunk(value)) {
controller.enqueue({ type: 'error', error: value });
}