Skip to content

Commit 50892cf

Browse files
authored
Tracing with Langfuse (#19)
Tracing with Langfuse
1 parent a3ba907 commit 50892cf

25 files changed

+2127
-6
lines changed

config/gni/devtools_grd_files.gni

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -640,6 +640,9 @@ grd_files_release_sources = [
640640
"front_end/panels/ai_chat/common/log.js",
641641
"front_end/panels/ai_chat/common/context.js",
642642
"front_end/panels/ai_chat/common/page.js",
643+
"front_end/panels/ai_chat/tracing/TracingProvider.js",
644+
"front_end/panels/ai_chat/tracing/LangfuseProvider.js",
645+
"front_end/panels/ai_chat/tracing/TracingConfig.js",
643646
"front_end/panels/ai_chat/ai_chat-meta.js",
644647
"front_end/panels/ai_chat/ai_chat.js",
645648
"front_end/panels/ai_chat/ai_chat_impl.js",

front_end/panels/ai_chat/BUILD.gn

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -77,6 +77,9 @@ devtools_module("ai_chat") {
7777
"common/log.ts",
7878
"common/context.ts",
7979
"common/page.ts",
80+
"tracing/TracingProvider.ts",
81+
"tracing/LangfuseProvider.ts",
82+
"tracing/TracingConfig.ts",
8083
]
8184

8285
deps = [
@@ -158,6 +161,9 @@ _ai_chat_sources = [
158161
"common/log.ts",
159162
"common/context.ts",
160163
"common/page.ts",
164+
"tracing/TracingProvider.ts",
165+
"tracing/LangfuseProvider.ts",
166+
"tracing/TracingConfig.ts",
161167
]
162168

163169
# Construct the expected JS output paths for the metadata

front_end/panels/ai_chat/LLM/LLMErrorHandler.ts

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -166,6 +166,49 @@ export class LLMRetryManager {
166166
this.onRetry = config.onRetry;
167167
}
168168

169+
/**
170+
* Creates a tracing observation for retry attempts and errors
171+
*/
172+
private async createRetryTracingObservation(
173+
error: Error,
174+
errorType: LLMErrorType,
175+
attempt: number,
176+
willRetry: boolean,
177+
context?: string
178+
): Promise<void> {
179+
try {
180+
const { getCurrentTracingContext, createTracingProvider } = await import('../tracing/TracingConfig.js');
181+
const tracingContext = getCurrentTracingContext();
182+
if (tracingContext) {
183+
const tracingProvider = createTracingProvider();
184+
await tracingProvider.createObservation({
185+
id: `error-retry-${Date.now()}-${Math.random().toString(36).substring(2, 6)}`,
186+
name: willRetry ? `LLM Error - Retry Attempt ${attempt}` : `LLM Error - Final Failure`,
187+
type: 'event',
188+
startTime: new Date(),
189+
input: {
190+
errorType,
191+
errorMessage: error.message,
192+
attempt,
193+
willRetry,
194+
context: context || 'unknown_operation'
195+
},
196+
error: willRetry ? undefined : error.message, // Only mark as error if not retrying
197+
metadata: {
198+
errorType,
199+
attempt,
200+
willRetry,
201+
retryable: LLMErrorClassifier.shouldRetry(errorType),
202+
operation: context
203+
}
204+
}, tracingContext.traceId);
205+
}
206+
} catch (tracingError) {
207+
// Don't fail the main operation due to tracing errors
208+
logger.debug('Failed to create retry tracing observation:', tracingError);
209+
}
210+
}
211+
169212
/**
170213
* Execute an operation with retry logic
171214
*/
@@ -209,6 +252,10 @@ export class LLMRetryManager {
209252
if (this.config.enableLogging) {
210253
logger.info(`Not retrying ${errorType} error`);
211254
}
255+
256+
// Create tracing observation for non-retryable error
257+
await this.createRetryTracingObservation(lastError, errorType, attempt, false, options.context);
258+
212259
throw lastError;
213260
}
214261

@@ -220,6 +267,10 @@ export class LLMRetryManager {
220267
if (this.config.enableLogging) {
221268
logger.error(`Max retries (${retryConfig.maxRetries}) exceeded for ${errorType}`);
222269
}
270+
271+
// Create tracing observation for max retries exceeded
272+
await this.createRetryTracingObservation(lastError, errorType, attempt, false, options.context);
273+
223274
throw lastError;
224275
}
225276

@@ -228,6 +279,10 @@ export class LLMRetryManager {
228279
if (this.config.enableLogging) {
229280
logger.error(`Total retry time limit (${this.config.maxTotalTimeMs}ms) exceeded`);
230281
}
282+
283+
// Create tracing observation for timeout
284+
await this.createRetryTracingObservation(lastError, errorType, attempt, false, options.context);
285+
231286
throw lastError;
232287
}
233288

@@ -238,6 +293,9 @@ export class LLMRetryManager {
238293
logger.warn(`Retrying after ${delayMs}ms (attempt ${attempt + 1}/${retryConfig.maxRetries + 1}) for ${errorType}`);
239294
}
240295

296+
// Create tracing observation for retry attempt
297+
await this.createRetryTracingObservation(lastError, errorType, attempt, true, options.context);
298+
241299
// Call retry callback if provided
242300
if (this.onRetry) {
243301
this.onRetry(attempt, lastError, errorType, delayMs);

front_end/panels/ai_chat/LLM/OpenAIProvider.ts

Lines changed: 45 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -252,6 +252,39 @@ export class OpenAIProvider extends LLMBaseProvider {
252252
return result;
253253
}
254254

255+
/**
256+
* Creates a tracing observation for API errors
257+
*/
258+
private async createErrorTracingObservation(error: Error, payloadBody: any): Promise<void> {
259+
try {
260+
const { getCurrentTracingContext, createTracingProvider } = await import('../tracing/TracingConfig.js');
261+
const context = getCurrentTracingContext();
262+
if (context) {
263+
const tracingProvider = createTracingProvider();
264+
await tracingProvider.createObservation({
265+
id: `error-openai-api-${Date.now()}-${Math.random().toString(36).substring(2, 6)}`,
266+
name: 'OpenAI API Error',
267+
type: 'event',
268+
startTime: new Date(),
269+
input: {
270+
model: payloadBody.model,
271+
endpoint: OpenAIProvider.API_ENDPOINT,
272+
requestSize: JSON.stringify(payloadBody).length
273+
},
274+
error: error.message,
275+
metadata: {
276+
provider: 'openai',
277+
errorType: 'api_error',
278+
hasApiKey: !!this.apiKey
279+
}
280+
}, context.traceId);
281+
}
282+
} catch (tracingError) {
283+
// Don't fail the main operation due to tracing errors
284+
logger.debug('Failed to create error tracing observation:', tracingError);
285+
}
286+
}
287+
255288
/**
256289
* Makes a request to the OpenAI Responses API
257290
*/
@@ -269,7 +302,12 @@ export class OpenAIProvider extends LLMBaseProvider {
269302
if (!response.ok) {
270303
const errorData = await response.json();
271304
logger.error('OpenAI API error:', errorData);
272-
throw new Error(`OpenAI API error: ${response.statusText} - ${errorData?.error?.message || 'Unknown error'}`);
305+
const error = new Error(`OpenAI API error: ${response.statusText} - ${errorData?.error?.message || 'Unknown error'}`);
306+
307+
// Create tracing observation for API errors
308+
await this.createErrorTracingObservation(error, payloadBody);
309+
310+
throw error;
273311
}
274312

275313
const data = await response.json();
@@ -282,6 +320,12 @@ export class OpenAIProvider extends LLMBaseProvider {
282320
return data;
283321
} catch (error) {
284322
logger.error('OpenAI API request failed:', error);
323+
324+
// Create tracing observation for network/fetch errors
325+
if (error instanceof Error) {
326+
await this.createErrorTracingObservation(error, payloadBody);
327+
}
328+
285329
throw error;
286330
}
287331
}

front_end/panels/ai_chat/agent_framework/AgentRunner.ts

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@ import type { LLMResponse, ParsedLLMAction, LLMMessage, LLMProvider } from '../L
88
import type { Tool } from '../tools/Tools.js';
99
import { ChatMessageEntity, type ChatMessage, type ModelChatMessage, type ToolResultMessage } from '../ui/ChatView.js';
1010
import { createLogger } from '../core/Logger.js';
11+
import { createTracingProvider, getCurrentTracingContext } from '../tracing/TracingConfig.js';
12+
import type { TracingProvider } from '../tracing/TracingProvider.js';
1113

1214
const logger = createLogger('AgentRunner');
1315

@@ -353,6 +355,51 @@ export class AgentRunner {
353355
if (parsedAction.type === 'tool_call') {
354356
const { name: toolName, args: toolArgs } = parsedAction;
355357
const toolCallId = crypto.randomUUID(); // Generate unique ID for OpenAI format
358+
359+
// Create tool-call event observation using current tracing context
360+
const tracingContext = getCurrentTracingContext();
361+
logger.info(`AgentRunner tool call with tracing context:`, {
362+
hasTracingContext: !!tracingContext,
363+
traceId: tracingContext?.traceId,
364+
toolName,
365+
agentName
366+
});
367+
console.log(`[TRACING DEBUG] AgentRunner tool call with tracing context:`, {
368+
hasTracingContext: !!tracingContext,
369+
traceId: tracingContext?.traceId,
370+
toolName,
371+
agentName
372+
});
373+
374+
if (tracingContext?.traceId) {
375+
const tracingProvider = createTracingProvider();
376+
try {
377+
await tracingProvider.createObservation({
378+
id: `event-tool-call-runner-${Date.now()}-${Math.random().toString(36).substring(2, 6)}`,
379+
name: `Tool Call (AgentRunner): ${toolName}`,
380+
type: 'event',
381+
startTime: new Date(),
382+
parentObservationId: tracingContext.parentObservationId,
383+
input: {
384+
toolName,
385+
toolArgs,
386+
toolCallId,
387+
agentName,
388+
reasoning: llmResponse.reasoning?.summary
389+
},
390+
metadata: {
391+
executingAgent: agentName,
392+
toolCallId,
393+
phase: 'tool_call_decision',
394+
iteration,
395+
source: 'AgentRunner'
396+
}
397+
}, tracingContext.traceId);
398+
} catch (tracingError) {
399+
logger.warn('Failed to create tool-call tracing observation:', tracingError);
400+
}
401+
}
402+
356403
newModelMessage = {
357404
entity: ChatMessageEntity.MODEL,
358405
action: 'tool',

0 commit comments

Comments (0)