diff --git a/README.md b/README.md
index 63dfcba..40c58ec 100644
--- a/README.md
+++ b/README.md
@@ -2,31 +2,30 @@
 The [OpenRouter](https://openrouter.ai/) provider for the [Vercel AI SDK](https://sdk.vercel.ai/docs) gives access to over 300 large language models on the OpenRouter chat and completion APIs.
 
-## Setup for AI SDK v5
+## Overview
 
-```bash
-# For pnpm
-pnpm add @openrouter/ai-sdk-provider
+This provider lets you use the Vercel AI SDK with the OpenRouter API, pairing OpenRouter's extensive model catalog with the convenience of the AI SDK.
 
-# For npm
-npm install @openrouter/ai-sdk-provider
+## Features
 
-# For yarn
-yarn add @openrouter/ai-sdk-provider
-```
+- **Access to over 300 models**: Use any of the models available on OpenRouter, including the latest open-source and proprietary models.
+- **Chat and completion APIs**: Use both the chat and completion APIs, with support for streaming and non-streaming responses.
+- **Tool support**: Use tools with supported models to build powerful applications.
+- **Usage accounting**: Track your token usage and costs with OpenRouter's usage accounting feature.
+- **Anthropic prompt caching**: Leverage Anthropic's prompt caching for faster and cheaper responses.
+- **Provider routing**: Control how your requests are routed to different providers.
 
-## (LEGACY) Setup for AI SDK v4
+## Setup
 
 ```bash
 # For pnpm
-pnpm add @openrouter/ai-sdk-provider@ai-sdk-v4
+pnpm add @openrouter/ai-sdk-provider
 
 # For npm
-npm install @openrouter/ai-sdk-provider@ai-sdk-v4
+npm install @openrouter/ai-sdk-provider
 
 # For yarn
-yarn add @openrouter/ai-sdk-provider@ai-sdk-v4
-
+yarn add @openrouter/ai-sdk-provider
 ```
 
 ## Provider Instance
@@ -37,6 +36,17 @@ You can import the default provider instance `openrouter` from `@openrouter/ai-s
 import { openrouter } from '@openrouter/ai-sdk-provider';
 ```
 
+You can also create your own provider instance with custom settings:
+
+```ts
+import { createOpenRouter } from '@openrouter/ai-sdk-provider';
+
+const openrouter = createOpenRouter({
+  apiKey: 'YOUR_API_KEY',
+  baseURL: 'https://my-proxy.com/api/v1',
+});
+```
+
 ## Example
 
 ```ts
@@ -197,3 +207,7 @@ if (result.providerMetadata?.openrouter?.usage) {
   );
 }
 ```
+
+## API Reference
+
+The full API reference is available in the [generated documentation]().
diff --git a/e2e/tools.ts b/e2e/tools.ts
index c672e9f..2310cf6 100644
--- a/e2e/tools.ts
+++ b/e2e/tools.ts
@@ -9,6 +9,9 @@ const openrouter = createOpenRouter({
   baseUrl: `${process.env.OPENROUTER_API_BASE}/api/v1`,
 });
 
+/**
+ * A tool for sending an SMS message.
+ */
 export const sendSMSTool = tool({
   description: 'Send an SMS to any phone number',
   inputSchema: z.object({
@@ -24,6 +27,9 @@ export const sendSMSTool = tool({
   },
 });
 
+/**
+ * A tool for reading an SMS message.
+ */
 export const readSMSTool = tool({
   description: 'Read the nth SMS from a phone number',
   inputSchema: z.object({
@@ -39,6 +45,9 @@ export const readSMSTool = tool({
   },
 });
 
+/**
+ * A tool for executing a command in the terminal.
+ */
 export const executeCommandInTerminalTool = tool({
   description: 'Execute a command in the terminal',
   inputSchema: z.object({
diff --git a/examples/next-chat/.env.local.example b/examples/next-chat/.env.local.example
new file mode 100644
index 0000000..bcdf210
--- /dev/null
+++ b/examples/next-chat/.env.local.example
@@ -0,0 +1,5 @@
+# Required: obtain an API key from https://openrouter.ai/keys
+OPENROUTER_API_KEY=sk-or-...
+
+# Optional: override the base URL if you are pointing at a proxy.
+# OPENROUTER_BASE_URL=https://openrouter.ai/api/v1
diff --git a/examples/next-chat/README.md b/examples/next-chat/README.md
new file mode 100644
index 0000000..76a99cc
--- /dev/null
+++ b/examples/next-chat/README.md
@@ -0,0 +1,48 @@
+# OpenRouter Next.js Chat Example
+
+This example demonstrates how to build a streaming chat experience in Next.js using the
+[`@openrouter/ai-sdk-provider`](https://www.npmjs.com/package/@openrouter/ai-sdk-provider)
+and the Vercel AI SDK. The UI lets you:
+
+- pick an OpenRouter model
+- toggle tool usage on or off
+- watch streaming assistant replies
+- inspect tool invocations and their inputs/outputs in real time
+
+## Getting Started
+
+1. Install dependencies:
+
+   ```bash
+   pnpm install
+   ```
+
+   > **Note:** the example is part of the monorepo. You can also `cd examples/next-chat`
+   > and run `pnpm install` followed by `pnpm dev`.
+
+2. Copy the example environment file and add your OpenRouter key:
+
+   ```bash
+   cp examples/next-chat/.env.local.example examples/next-chat/.env.local
+   ```
+
+   At minimum you need `OPENROUTER_API_KEY`. Set `OPENROUTER_BASE_URL` if you proxy requests.
+
+3. Start the development server:
+
+   ```bash
+   pnpm --filter @openrouter/examples-next-chat dev
+   ```
+
+   Visit `http://localhost:3000` to try the chat experience.
+
+## How It Works
+
+- `app/api/chat/route.ts` configures the OpenRouter provider, streams responses with tools, and
+  returns AI SDK UI message streams.
+- `app/page.tsx` implements a small client-side state machine that consumes the stream, renders
+  messages, and keeps track of tool invocations.
+- `lib/tools.ts` defines two sample tools (`getCurrentWeather` and `getCurrentTime`). You can add
+  your own tools or wire in real data sources.
+
+This example is intentionally lightweight so you can adapt it for your own projects.
diff --git a/examples/next-chat/app/api/chat/route.ts b/examples/next-chat/app/api/chat/route.ts
new file mode 100644
index 0000000..cf55968
--- /dev/null
+++ b/examples/next-chat/app/api/chat/route.ts
@@ -0,0 +1,65 @@
+import { createOpenRouter } from '@openrouter/ai-sdk-provider';
+import type { ModelMessage } from 'ai';
+import { streamText } from 'ai';
+
+import { BASIC_TOOLS } from '../../../lib/tools';
+import { DEFAULT_SYSTEM_PROMPT } from '../../../lib/models';
+
+interface ChatRequestBody {
+  modelId: string;
+  toolMode?: 'auto' | 'disabled';
+  messages: ModelMessage[];
+}
+
+const openrouter = createOpenRouter({
+  compatibility: 'strict',
+  baseURL: process.env.OPENROUTER_BASE_URL ?? process.env.OPENROUTER_API_BASE,
+});
+
+function normalizeToolMode(toolMode: ChatRequestBody['toolMode']) {
+  return toolMode === 'disabled' ? 'disabled' : 'auto';
+}
+
+export async function POST(request: Request) {
+  const apiKey = process.env.OPENROUTER_API_KEY;
+  if (!apiKey) {
+    return Response.json(
+      { error: 'Missing OPENROUTER_API_KEY environment variable.' },
+      { status: 500 },
+    );
+  }
+
+  let body: ChatRequestBody;
+  try {
+    body = (await request.json()) as ChatRequestBody;
+  } catch (_error) {
+    return Response.json({ error: 'Invalid JSON payload.' }, { status: 400 });
+  }
+
+  if (!body || typeof body.modelId !== 'string') {
+    return Response.json({ error: 'Request must include a modelId string.' }, { status: 400 });
+  }
+
+  if (!Array.isArray(body.messages) || body.messages.some((message) => typeof message !== 'object')) {
+    return Response.json({ error: 'Messages must be an array of chat messages.' }, { status: 400 });
+  }
+
+  const toolMode = normalizeToolMode(body.toolMode);
+  const shouldExposeTools = toolMode !== 'disabled';
+
+  try {
+    const result = streamText({
+      model: openrouter(body.modelId),
+      system: DEFAULT_SYSTEM_PROMPT,
+      messages: body.messages,
+      tools: shouldExposeTools ? BASIC_TOOLS : undefined,
+      toolChoice: shouldExposeTools ? 'auto' : 'none',
+    });
+
+    return result.toUIMessageStreamResponse();
+  } catch (error) {
+    const errorMessage =
+      error instanceof Error ? error.message : 'Unknown error while contacting OpenRouter.';
+    return Response.json({ error: errorMessage }, { status: 500 });
+  }
+}
diff --git a/examples/next-chat/app/globals.css b/examples/next-chat/app/globals.css
new file mode 100644
index 0000000..eedeb41
--- /dev/null
+++ b/examples/next-chat/app/globals.css
@@ -0,0 +1,462 @@
+:root {
+  color-scheme: light dark;
+  font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
+  line-height: 1.5;
+  --surface-light: #ffffff;
+  --surface-dark: #0f172a;
+  --background-light: #f5f6fb;
+  --background-dark: #020617;
+  --text-muted-light: rgba(15, 23, 42, 0.65);
+  --text-muted-dark: rgba(226, 232, 240, 0.75);
+}
+
+* {
+  box-sizing: border-box;
+}
+
+body {
+  margin: 0;
+  min-height: 100vh;
+  background-color: var(--background-light);
+  color: #0f172a;
+}
+
+@media (prefers-color-scheme: dark) {
+  body {
+    background-color: var(--background-dark);
+    color: #f8fafc;
+  }
+}
+
+main {
+  min-height: 100vh;
+  display: flex;
+  justify-content: center;
+  padding: 2rem 1.5rem 3rem;
+}
+
+.container {
+  width: min(960px, 100%);
+  display: flex;
+  flex-direction: column;
+  gap: 1.5rem;
+}
+
+header {
+  text-align: center;
+}
+
+header h1 {
+  margin: 0;
+  font-size: clamp(1.8rem, 2.4vw + 1rem, 2.4rem);
+}
+
+.subtitle {
+  margin: 0.5rem 0 0;
+  color: var(--text-muted-light);
+}
+
+@media (prefers-color-scheme: dark) {
+  .subtitle {
+    color: var(--text-muted-dark);
+  }
+}
+
+.controls {
+  display: grid;
+  gap: 1rem;
+  grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
+}
+
+.control {
+  background-color: rgba(255, 255, 255, 0.88);
+  border: 1px solid rgba(15, 23, 42, 0.08);
+  border-radius: 12px;
+  padding: 1rem;
+  display: flex;
+  flex-direction: column;
+  gap: 0.5rem;
+  box-shadow: 0 12px 30px rgba(15, 23, 42, 0.08);
+}
+
+@media (prefers-color-scheme: dark) {
+  .control {
+    background-color: rgba(15, 23, 42, 0.55);
+    border-color: rgba(148, 163, 184, 0.18);
+    box-shadow: 0 12px 30px rgba(0, 0, 0, 0.45);
+  }
+}
+
+label,
+.label {
+  font-weight: 600;
+  font-size: 0.8rem;
+  letter-spacing: 0.05em;
+  text-transform: uppercase;
+  color: rgba(15, 23, 42, 0.7);
+}
+
+@media (prefers-color-scheme: dark) {
+  label,
+  .label {
+    color: rgba(226, 232, 240, 0.85);
+  }
+}
+
+select,
+textarea,
+button {
+  font: inherit;
+}
+
+select,
+textarea {
+  width: 100%;
+  border-radius: 10px;
+  border: 1px solid rgba(15, 23, 42, 0.16);
+  padding: 0.6rem 0.75rem;
+  background-color: rgba(255, 255, 255, 0.96);
+  color: inherit;
+  transition: border-color 0.2s ease, box-shadow 0.2s ease;
+}
+
+textarea {
+  min-height: 120px;
+  resize: vertical;
+  line-height: 1.4;
+}
+
+select:focus,
+textarea:focus {
+  outline: none;
+  border-color: #2563eb;
+  box-shadow: 0 0 0 3px rgba(37, 99, 235, 0.25);
+}
+
+select:disabled,
+textarea:disabled {
+  opacity: 0.55;
+  cursor: not-allowed;
+}
+
+@media (prefers-color-scheme: dark) {
+  select,
+  textarea {
+    background-color: rgba(15, 23, 42, 0.78);
+    border-color: rgba(148, 163, 184, 0.2);
+  }
+
+  select:focus,
+  textarea:focus {
+    border-color: #60a5fa;
+    box-shadow: 0 0 0 3px rgba(96, 165, 250, 0.25);
+  }
+}
+
+.badge {
+  display: inline-flex;
+  align-items: center;
+  padding: 0.2rem 0.6rem;
+  border-radius: 999px;
+  font-size: 0.75rem;
+  font-weight: 600;
+  background-color: rgba(37, 99, 235, 0.12);
+  color: #1d4ed8;
+  border: 1px solid rgba(37, 99, 235, 0.18);
+  width: fit-content;
+}
+
+.badge.idle {
+  background-color: rgba(15, 23, 42, 0.08);
+  color: rgba(15, 23, 42, 0.75);
+  border-color: transparent;
+}
+
+.badge.status.collecting {
+  background-color: rgba(16, 185, 129, 0.12);
+  color: #047857;
+  border-color: rgba(16, 185, 129, 0.2);
+}
+
+.badge.status.running,
+.badge.status.complete {
+  background-color: rgba(59, 130, 246, 0.12);
+  color: #1d4ed8;
+  border-color: rgba(59, 130, 246, 0.2);
+}
+
+.badge.status.error {
+  background-color: rgba(239, 68, 68, 0.14);
+  color: #b91c1c;
+  border-color: rgba(239, 68, 68, 0.2);
+}
+
+@media (prefers-color-scheme: dark) {
+  .badge {
+    background-color: rgba(96, 165, 250, 0.18);
+    color: #bfdbfe;
+    border-color: rgba(96, 165, 250, 0.24);
+  }
+
+  .badge.idle {
+    background-color: rgba(148, 163, 184, 0.22);
+    color: rgba(226, 232, 240, 0.85);
+  }
+
+  .badge.status.collecting {
+    background-color: rgba(45, 212, 191, 0.24);
+    color: #5eead4;
+  }
+
+  .badge.status.running,
+  .badge.status.complete {
+    background-color: rgba(96, 165, 250, 0.24);
+    color: #bfdbfe;
+  }
+
+  .badge.status.error {
+    background-color: rgba(248, 113, 113, 0.22);
+    color: #fecaca;
+  }
+}
+
+.primary,
+.secondary {
+  border-radius: 999px;
+  border: none;
+  cursor: pointer;
+  font-weight: 600;
+  padding: 0.55rem 1.2rem;
+  transition: transform 0.15s ease, box-shadow 0.15s ease, opacity 0.15s ease;
+}
+
+.primary {
+  background: linear-gradient(135deg, #2563eb, #4f46e5);
+  color: #fff;
+  box-shadow: 0 10px 24px rgba(37, 99, 235, 0.35);
+}
+
+.primary:disabled {
+  opacity: 0.5;
+  cursor: not-allowed;
+  box-shadow: none;
+}
+
+.primary:not(:disabled):active {
+  transform: translateY(1px);
+}
+
+.secondary {
+  background-color: transparent;
+  border: 1px solid rgba(15, 23, 42, 0.2);
+  color: inherit;
+}
+
+.secondary:disabled {
+  opacity: 0.55;
+  cursor: not-allowed;
+}
+
+@media (prefers-color-scheme: dark) {
+  .secondary {
+    border-color: rgba(148, 163, 184, 0.35);
+  }
+}
+
+.chat-panel {
+  display: flex;
+  flex-direction: column;
+  gap: 1rem;
+  background-color: rgba(255, 255, 255, 0.92);
+  border: 1px solid rgba(15, 23, 42, 0.08);
+  border-radius: 16px;
+  padding: 1.5rem;
+  box-shadow: 0 24px 48px rgba(15, 23, 42, 0.12);
+}
+
+@media (prefers-color-scheme: dark) {
+  .chat-panel {
+    background-color: rgba(15, 23, 42, 0.65);
+    border-color: rgba(148, 163, 184, 0.2);
+    box-shadow: 0 24px 48px rgba(0, 0, 0, 0.4);
+  }
+}
+
+.alert {
+  padding: 0.75rem 1rem;
+  border-radius: 12px;
+  background-color: rgba(239, 68, 68, 0.14);
+  color: #991b1b;
+  border: 1px solid rgba(239, 68, 68, 0.2);
+}
+
+@media (prefers-color-scheme: dark) {
+  .alert {
+    background-color: rgba(239, 68, 68, 0.22);
+    color: #fecaca;
+  }
+}
+
+.chat-log {
+  display: flex;
+  flex-direction: column;
+  gap: 1rem;
+  padding: 1rem;
+  border-radius: 12px;
+  border: 1px solid rgba(15, 23, 42, 0.08);
+  max-height: 420px;
+  overflow-y: auto;
+  background: rgba(248, 250, 252, 0.85);
+}
+
+@media (prefers-color-scheme: dark) {
+  .chat-log {
+    background: rgba(30, 41, 59, 0.62);
+    border-color: rgba(148, 163, 184, 0.16);
+  }
+}
+
+.empty {
+  margin: 0;
+  color: rgba(15, 23, 42, 0.6);
+}
+
+@media (prefers-color-scheme: dark) {
+  .empty {
+    color: rgba(226, 232, 240, 0.7);
+  }
+}
+
+.message {
+  padding: 1rem;
+  border-radius: 12px;
+  background-color: rgba(255, 255, 255, 0.95);
+  border: 1px solid rgba(15, 23, 42, 0.06);
+  display: flex;
+  flex-direction: column;
+  gap: 0.75rem;
+}
+
+.message.user {
+  border-left: 4px solid rgba(37, 99, 235, 0.6);
+}
+
+.message.assistant {
+  border-left: 4px solid rgba(16, 185, 129, 0.55);
+}
+
+@media (prefers-color-scheme: dark) {
+  .message {
+    background-color: rgba(15, 23, 42, 0.74);
+    border-color: rgba(148, 163, 184, 0.12);
+  }
+
+  .message.user {
+    border-left-color: rgba(96, 165, 250, 0.6);
+  }
+
+  .message.assistant {
+    border-left-color: rgba(45, 212, 191, 0.6);
+  }
+}
+
+.message-header {
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  gap: 0.5rem;
+}
+
+.role-label {
+  font-weight: 600;
+  letter-spacing: 0.01em;
+}
+
+.message-text {
+  margin: 0;
+  white-space: pre-wrap;
+  line-height: 1.55;
+}
+
+.tool-list {
+  display: flex;
+  flex-direction: column;
+  gap: 0.75rem;
+}
+
+.tool-card {
+  border-radius: 10px;
+  border: 1px solid rgba(37, 99, 235, 0.18);
+  padding: 0.75rem 0.85rem;
+  background: rgba(37, 99, 235, 0.07);
+  display: flex;
+  flex-direction: column;
+  gap: 0.5rem;
+}
+
+.tool-card-header {
+  display: flex;
+  justify-content: space-between;
+  align-items: center;
+  gap: 0.5rem;
+}
+
+.tool-name {
+  font-weight: 600;
+}
+
+.tool-block {
+  display: flex;
+  flex-direction: column;
+  gap: 0.35rem;
+}
+
+.tool-block pre {
+  margin: 0;
+  padding: 0.6rem;
+  border-radius: 8px;
+  background: rgba(15, 23, 42, 0.08);
+  border: 1px solid rgba(15, 23, 42, 0.1);
+  font-size: 0.8rem;
+  line-height: 1.4;
+  max-height: 200px;
+  overflow: auto;
+  white-space: pre-wrap;
+}
+
+@media (prefers-color-scheme: dark) {
+  .tool-card {
+    background: rgba(96, 165, 250, 0.18);
+    border-color: rgba(96, 165, 250, 0.24);
+  }
+
+  .tool-block pre {
+    background: rgba(15, 23, 42, 0.78);
+    border-color: rgba(148, 163, 184, 0.2);
+  }
+}
+
+.hint {
+  margin: 0;
+  font-size: 0.8rem;
+  color: rgba(15, 23, 42, 0.6);
+}
+
+@media (prefers-color-scheme: dark) {
+  .hint {
+    color: rgba(226, 232, 240, 0.7);
+  }
+}
+
+.chat-form {
+  display: flex;
+  flex-direction: column;
+  gap: 0.75rem;
+}
+
+.actions {
+  display: flex;
+  gap: 0.75rem;
+  justify-content: flex-end;
+  flex-wrap: wrap;
+}
diff --git a/examples/next-chat/app/layout.tsx b/examples/next-chat/app/layout.tsx
new file mode 100644
index 0000000..3c4e069
--- /dev/null
+++ b/examples/next-chat/app/layout.tsx
@@ -0,0 +1,17 @@
+import type { Metadata } from 'next';
+import type { ReactNode } from 'react';
+import './globals.css';
+
+export const metadata: Metadata = {
+  title: 'OpenRouter Chat Playground',
+  description:
+    'A minimal Next.js chat app that demonstrates streaming OpenRouter responses, model selection, and tool use.',
+};
+
+export default function RootLayout({ children }: { children: ReactNode }) {
+  return (
+    <html lang="en">
+      <body>{children}</body>
+    </html>
+  );
+}
diff --git a/examples/next-chat/app/page.tsx b/examples/next-chat/app/page.tsx
new file mode 100644
index 0000000..11cf687
--- /dev/null
+++ b/examples/next-chat/app/page.tsx
@@ -0,0 +1,566 @@
+'use client';
+
+import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
+import type { FormEvent } from 'react';
+import type { ModelMessage } from 'ai';
+
+import type { ToolMode } from '../lib/models';
+import { DEFAULT_MODEL_ID, DEFAULT_TOOL_MODE, MODEL_OPTIONS } from '../lib/models';
+
+type ToolStatus = 'collecting' | 'running' | 'complete' | 'error';
+
+interface ToolCall {
+  id: string;
+  name: string;
+  status: ToolStatus;
+  inputText?: string;
+  resultText?: string;
+  errorText?: string;
+  providerExecuted?: boolean;
+}
+
+interface ConversationEntry {
+  id: string;
+  role: 'user' | 'assistant';
+  text: string;
+  tools: ToolCall[];
+  pending: boolean;
+}
+
+const TOOL_STATUS_LABEL: Record<ToolStatus, string> = {
+  collecting: 'Collecting input',
+  running: 'Running',
+  complete: 'Completed',
+  error: 'Error',
+};
+
+const TOOL_MODE_OPTIONS: Array<{ value: ToolMode; label: string }> = [
+  { value: 'auto', label: 'Automatic tool calling' },
+  { value: 'disabled', label: 'Disable tools' },
+];
+
+function createMessageId(counterRef: { current: number }, prefix: string) {
+  counterRef.current += 1;
+  return `${prefix}-${Date.now()}-${counterRef.current}`;
+}
+
+function mapConversationToModelMessages(history: ConversationEntry[]): ModelMessage[] {
+  return history.map((entry) =>
+    entry.role === 'user'
+      ? ({ role: 'user', content: entry.text } as ModelMessage)
+      : ({ role: 'assistant', content: entry.text } as ModelMessage),
+  );
+}
+
+function formatData(value: unknown): string {
+  if (value === undefined || value === null) {
+    return '—';
+  }
+
+  if (typeof value === 'string') {
+    return value;
+  }
+
+  try {
+    return JSON.stringify(value, null, 2);
+  } catch {
+    return String(value);
+  }
+}
+
+export default function ChatPage() {
+  const [conversation, setConversation] = useState<ConversationEntry[]>([]);
+  const [input, setInput] = useState('');
+  const [modelId, setModelId] = useState(DEFAULT_MODEL_ID);
+  const [toolMode, setToolMode] = useState<ToolMode>(DEFAULT_TOOL_MODE);
+  const [isStreaming, setIsStreaming] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+
+  const abortControllerRef = useRef<AbortController | null>(null);
+  const messageCounterRef = useRef(0);
+  const currentAssistantIdRef = useRef<string | null>(null);
+
+  const selectedModel = useMemo(
+    () => MODEL_OPTIONS.find((option) => option.id === modelId) ?? MODEL_OPTIONS[0],
+    [modelId],
+  );
+  const toolsSupported = selectedModel?.supportsTools ?? false;
+
+  useEffect(() => {
+    if (!toolsSupported && toolMode !== 'disabled') {
+      setToolMode('disabled');
+    }
+  }, [toolMode, toolsSupported]);
+
+  const appendEntry = useCallback((entry: ConversationEntry) => {
+    setConversation((prev) => [...prev, entry]);
+  }, []);
+
+  const updateEntryById = useCallback(
+    (id: string, updater: (entry: ConversationEntry) => ConversationEntry) => {
+      setConversation((prev) => {
+        const index = prev.findIndex((item) => item.id === id);
+        if (index === -1) {
+          return prev;
+        }
+
+        const updated = updater(prev[index]);
+        if (updated === prev[index]) {
+          return prev;
+        }
+
+        const next = [...prev];
+        next[index] = updated;
+        return next;
+      });
+    },
+    [],
+  );
+
+  const handleStop = useCallback(() => {
+    abortControllerRef.current?.abort();
+  }, []);
+
+  const handleClear = useCallback(() => {
+    abortControllerRef.current?.abort();
+    abortControllerRef.current = null;
+    currentAssistantIdRef.current = null;
+    setConversation([]);
+    setInput('');
+    setError(null);
+    setIsStreaming(false);
+  }, []);
+
+  const streamResponse = useCallback(
+    async (history: ConversationEntry[]) => {
+      setIsStreaming(true);
+      const controller = new AbortController();
+      abortControllerRef.current = controller;
+
+      const payload = JSON.stringify({
+        messages: mapConversationToModelMessages(history),
+        modelId,
+        toolMode,
+      });
+
+      const toolBuffers = new Map<string, string>();
+
+      const ensureAssistantMessage = (messageId?: string) => {
+        if (currentAssistantIdRef.current) {
+          return currentAssistantIdRef.current;
+        }
+
+        const newId = messageId ?? createMessageId(messageCounterRef, 'assistant');
+        currentAssistantIdRef.current = newId;
+        appendEntry({
+          id: newId,
+          role: 'assistant',
+          text: '',
+          tools: [],
+          pending: true,
+        });
+        return newId;
+      };
+
+      const updateToolCall = (
+        assistantId: string,
+        toolId: string,
+        updater: (tool: ToolCall) => ToolCall,
+      ) => {
+        updateEntryById(assistantId, (entry) => ({
+          ...entry,
+          tools: entry.tools.map((tool) => (tool.id === toolId ? updater(tool) : tool)),
+        }));
+      };
+
+      const parseEvent = (eventText: string) => {
+        const dataLines = eventText
+          .split('\n')
+          .filter((line) => line.startsWith('data:'))
+          .map((line) => line.slice(5).trim());
+
+        if (dataLines.length === 0) {
+          return null;
+        }
+
+        const payloadText = dataLines.join('');
+        if (!payloadText) {
+          return null;
+        }
+
+        try {
+          return JSON.parse(payloadText) as Record<string, unknown>;
+        } catch {
+          return null;
+        }
+      };
+
+      const finalizeAssistant = (fallback?: string) => {
+        const assistantId = currentAssistantIdRef.current;
+        if (!assistantId) {
+          return;
+        }
+
+        updateEntryById(assistantId, (entry) => ({
+          ...entry,
+          text: entry.text || fallback || entry.text,
+          pending: false,
+        }));
+      };
+
+      const processChunk = (chunk: Record<string, unknown>) => {
+        const type = typeof chunk.type === 'string' ? chunk.type : null;
+        if (!type) {
+          return;
+        }
+
+        if (type === 'start') {
+          const messageId = typeof chunk.messageId === 'string' ? chunk.messageId : undefined;
+          ensureAssistantMessage(messageId);
+          return;
+        }
+
+        const assistantId = ensureAssistantMessage();
+
+        switch (type) {
+          case 'text-delta': {
+            if (typeof chunk.delta === 'string') {
+              // Capture the narrowed string in a const so the narrowing
+              // survives into the updater closure below.
+              const delta = chunk.delta;
+              updateEntryById(assistantId, (entry) => ({
+                ...entry,
+                text: entry.text + delta,
+              }));
+            }
+            break;
+          }
+          case 'tool-input-start': {
+            if (typeof chunk.toolCallId === 'string' && typeof chunk.toolName === 'string') {
+              toolBuffers.set(chunk.toolCallId, '');
+              const status: ToolStatus = chunk.providerExecuted === true ? 'running' : 'collecting';
+              updateEntryById(assistantId, (entry) => ({
+                ...entry,
+                tools: [
+                  ...entry.tools,
+                  {
+                    id: chunk.toolCallId as string,
+                    name: chunk.toolName as string,
+                    status,
+                    providerExecuted: chunk.providerExecuted === true,
+                  },
+                ],
+              }));
+            }
+            break;
+          }
+          case 'tool-input-delta': {
+            if (typeof chunk.toolCallId === 'string' && typeof chunk.inputTextDelta === 'string') {
+              const nextValue = (toolBuffers.get(chunk.toolCallId) ?? '') + chunk.inputTextDelta;
+              toolBuffers.set(chunk.toolCallId, nextValue);
+              updateToolCall(assistantId, chunk.toolCallId, (tool) => ({
+                ...tool,
+                inputText: nextValue,
+                status: tool.status === 'collecting' ? 'collecting' : tool.status,
+              }));
+            }
+            break;
+          }
+          case 'tool-input-available': {
+            if (typeof chunk.toolCallId === 'string') {
+              const formatted =
+                'input' in chunk ? formatData((chunk as { input?: unknown }).input) : undefined;
+              const preview = toolBuffers.get(chunk.toolCallId);
+              toolBuffers.delete(chunk.toolCallId);
+              updateToolCall(assistantId, chunk.toolCallId, (tool) => ({
+                ...tool,
+                inputText: formatted ?? preview ?? tool.inputText,
+                providerExecuted: tool.providerExecuted || chunk.providerExecuted === true,
+                status: 'running',
+              }));
+            }
+            break;
+          }
+          case 'tool-output-available': {
+            if (typeof chunk.toolCallId === 'string') {
+              const formatted =
+                'output' in chunk ? formatData((chunk as { output?: unknown }).output) : undefined;
+              updateToolCall(assistantId, chunk.toolCallId, (tool) => ({
+                ...tool,
+                resultText: formatted ?? tool.resultText,
+                status: 'complete',
+              }));
+            }
+            break;
+          }
+          case 'tool-output-error': {
+            if (typeof chunk.toolCallId === 'string') {
+              const errorText =
+                typeof chunk.errorText === 'string' ? chunk.errorText : 'Tool error';
+              updateToolCall(assistantId, chunk.toolCallId, (tool) => ({
+                ...tool,
+                errorText,
+                status: 'error',
+              }));
+            }
+            break;
+          }
+          case 'finish': {
+            updateEntryById(assistantId, (entry) => ({
+              ...entry,
+              pending: false,
+            }));
+            break;
+          }
+          case 'abort': {
+            finalizeAssistant('Response aborted.');
+            break;
+          }
+          case 'error': {
+            if (typeof chunk.errorText === 'string') {
+              setError(chunk.errorText);
+            }
+            finalizeAssistant('The model returned an error.');
+            break;
+          }
+          default:
+            break;
+        }
+      };
+
+      try {
+        const response = await fetch('/api/chat', {
+          method: 'POST',
+          headers: { 'Content-Type': 'application/json' },
+          body: payload,
+          signal: controller.signal,
+        });
+
+        if (!response.ok || !response.body) {
+          const message = await response.text();
+          throw new Error(message || 'Unable to reach the chat endpoint.');
+        }
+
+        const reader = response.body.getReader();
+        const decoder = new TextDecoder();
+        let buffer = '';
+
+        while (true) {
+          const { value, done } = await reader.read();
+          buffer += decoder.decode(value ?? new Uint8Array(), { stream: !done });
+
+          let boundary = buffer.indexOf('\n\n');
+          while (boundary !== -1) {
+            const eventText = buffer.slice(0, boundary);
+            buffer = buffer.slice(boundary + 2);
+            const chunk = parseEvent(eventText);
+            if (chunk) {
+              processChunk(chunk);
+            }
+            boundary = buffer.indexOf('\n\n');
+          }
+
+          if (done) {
+            break;
+          }
+        }
+
+        finalizeAssistant();
+      } catch (error) {
+        if (controller.signal.aborted) {
+          finalizeAssistant('Generation cancelled.');
+          return;
+        }
+
+        const message =
+          error instanceof Error ? error.message : 'Unexpected error while streaming response.';
+        setError(message);
+        finalizeAssistant('The response ended unexpectedly.');
+      } finally {
+        setIsStreaming(false);
+        abortControllerRef.current = null;
+        currentAssistantIdRef.current = null;
+      }
+    },
+    [appendEntry, modelId, toolMode, updateEntryById],
+  );
+
+  const handleSubmit = useCallback(
+    (event: FormEvent) => {
+      event.preventDefault();
+      if (isStreaming) {
+        return;
+      }
+
+      const trimmed = input.trim();
+      if (!trimmed) {
+        return;
+      }
+
+      const userEntry: ConversationEntry = {
+        id: createMessageId(messageCounterRef, 'user'),
+        role: 'user',
+        text: trimmed,
+        tools: [],
+        pending: false,
+      };
+
+      const nextConversation = [...conversation, userEntry];
+      setConversation(nextConversation);
+      setInput('');
+      setError(null);
+      void streamResponse(nextConversation);
+    },
+    [conversation, input, isStreaming, streamResponse],
+  );
+
+  return (
+    <main>
+      <div className="container">
+        <header>
+          <h1>OpenRouter Chat Playground</h1>
+          <p className="subtitle">
+            Pick a model, decide whether tool calling is enabled, and chat with a streaming assistant.
+          </p>
+        </header>
+
+        <section className="controls">
+          <div className="control">
+            <label htmlFor="model-select">Model</label>
+            <select
+              id="model-select"
+              value={modelId}
+              onChange={(event) => setModelId(event.target.value)}
+              disabled={isStreaming}
+            >
+              {MODEL_OPTIONS.map((option) => (
+                <option key={option.id} value={option.id}>
+                  {option.label}
+                </option>
+              ))}
+            </select>
+            {selectedModel ? <p className="hint">{selectedModel.description}</p> : null}
+          </div>
+
+          <div className="control">
+            <label htmlFor="tool-mode-select">Tools</label>
+            <select
+              id="tool-mode-select"
+              value={toolMode}
+              onChange={(event) => setToolMode(event.target.value as ToolMode)}
+              disabled={isStreaming || !toolsSupported}
+            >
+              {TOOL_MODE_OPTIONS.map((option) => (
+                <option key={option.value} value={option.value}>
+                  {option.label}
+                </option>
+              ))}
+            </select>
+            {!toolsSupported ? <p className="hint">Tools are disabled for this model.</p> : null}
+          </div>
+
+          <div className="control">
+            <span className="label">Status</span>
+            <span className={isStreaming ? 'badge' : 'badge idle'}>
+              {isStreaming ? 'Streaming response…' : 'Ready'}
+            </span>
+          </div>
+
+          <div className="control">
+            <span className="label">Conversation</span>
+            <button type="button" className="secondary" onClick={handleClear}>
+              Clear
+            </button>
+          </div>
+        </section>
+
+        <section className="chat-panel">
+          {error ? <div className="alert">{error}</div> : null}
+
+          <div className="chat-log">
+            {conversation.length === 0 ? (
+              <p className="empty">
+                Start by asking a question. The assistant streams its reply and displays each tool call.
+              </p>
+            ) : (
+              conversation.map((entry) => (
+                <div key={entry.id} className={`message ${entry.role}`}>
+                  <div className="message-header">
+                    <span className="role-label">{entry.role === 'user' ? 'You' : 'Assistant'}</span>
+                    {entry.pending ? <span className="badge">Streaming…</span> : null}
+                  </div>
+                  {entry.text ? <p className="message-text">{entry.text}</p> : null}
+                  {entry.tools.length > 0 ? (
+                    <div className="tool-list">
+                      {entry.tools.map((tool) => (
+                        <div key={tool.id} className="tool-card">
+                          <div className="tool-card-header">
+                            <span className="tool-name">{tool.name}</span>
+                            <span className={`badge status ${tool.status}`}>
+                              {TOOL_STATUS_LABEL[tool.status]}
+                            </span>
+                          </div>
+                          {tool.providerExecuted ? (
+                            <p className="hint">Executed by provider</p>
+                          ) : null}
+                          {tool.inputText ? (
+                            <div className="tool-block">
+                              <span className="label">Input</span>
+                              <pre>{tool.inputText}</pre>
+                            </div>
+                          ) : null}
+                          {tool.resultText ? (
+                            <div className="tool-block">
+                              <span className="label">Result</span>
+                              <pre>{tool.resultText}</pre>
+                            </div>
+                          ) : null}
+                          {tool.errorText ? (
+                            <div className="tool-block">
+                              <span className="label">Error</span>
+                              <pre>{tool.errorText}</pre>
+                            </div>
+                          ) : null}
+                        </div>
+                      ))}
+                    </div>
+                  ) : null}
+                </div>
+              ))
+            )}
+          </div>
+
+          <form className="chat-form" onSubmit={handleSubmit}>
+            <label htmlFor="chat-input">Message</label>
+            <textarea
+              id="chat-input"
+              value={input}
+              onChange={(event) => setInput(event.target.value)}
+              placeholder="Ask the assistant anything…"
+              disabled={isStreaming}
+            />
+            <div className="actions">
+              <button
+                type="button"
+                className="secondary"
+                onClick={handleStop}
+                disabled={!isStreaming}
+              >
+                Stop
+              </button>
+              <button type="submit" className="primary" disabled={isStreaming || !input.trim()}>
+                Send
+              </button>
+            </div>
+          </form>
+        </section>
+      </div>
+    </main>
+  );
+}
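The route and page above import from `lib/models.ts` and `lib/tools.ts`, which fall outside this diff (the example README describes the latter as defining `getCurrentWeather` and `getCurrentTime`). For orientation, here is a minimal sketch of what those modules could look like, inferred purely from the imports used above; the model IDs, labels, descriptions, and tool bodies are illustrative placeholders, not the actual files.

```ts
// examples/next-chat/lib/models.ts (hypothetical sketch, not part of this diff)
export type ToolMode = 'auto' | 'disabled';

export interface ModelOption {
  id: string; // OpenRouter model slug passed to openrouter(modelId)
  label: string; // display name for the model picker
  description: string; // short blurb shown under the picker
  supportsTools: boolean;
}

// Any OpenRouter model slugs work here; these two are placeholders.
export const MODEL_OPTIONS: ModelOption[] = [
  {
    id: 'openai/gpt-4o-mini',
    label: 'GPT-4o mini',
    description: 'Fast, inexpensive model with tool support.',
    supportsTools: true,
  },
  {
    id: 'meta-llama/llama-3.1-8b-instruct',
    label: 'Llama 3.1 8B Instruct',
    description: 'Small open-weights model; tool support varies by provider.',
    supportsTools: false,
  },
];

export const DEFAULT_MODEL_ID = MODEL_OPTIONS[0].id;
export const DEFAULT_TOOL_MODE: ToolMode = 'auto';
export const DEFAULT_SYSTEM_PROMPT =
  'You are a helpful assistant. Use the available tools when they help answer the question.';
```

```ts
// examples/next-chat/lib/tools.ts (hypothetical sketch, not part of this diff)
import { tool } from 'ai';
import { z } from 'zod';

// Passed straight to streamText({ tools: BASIC_TOOLS }) in route.ts.
export const BASIC_TOOLS = {
  getCurrentWeather: tool({
    description: 'Get the current weather for a city',
    inputSchema: z.object({
      city: z.string().describe('City name, e.g. "Berlin"'),
    }),
    // Canned result; a real app would call a weather API here.
    execute: async ({ city }) => ({
      city,
      temperatureC: 21,
      conditions: 'partly cloudy',
    }),
  }),
  getCurrentTime: tool({
    description: 'Get the current time as an ISO 8601 string',
    inputSchema: z.object({}),
    execute: async () => ({ now: new Date().toISOString() }),
  }),
};
```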