Draft

32 commits
dccd647
feat(llm): try to improve tool and tool calling, part 1
perfectra1n Jun 11, 2025
87fd6af
feat(llm): try to improve tool and tool calling, part 2
perfectra1n Jun 11, 2025
e98954c
Merge branch 'develop' into feat/llm-tool-improvement
perfectra1n Jun 19, 2025
e0e1f07
feat(llm): try to squeeze even more out of the tools
perfectra1n Jun 19, 2025
6e0fee6
fix(llm): resolve tool lint errors
perfectra1n Jun 19, 2025
d798d29
fix(llm): remove the vector search tool from the search_notes tool
perfectra1n Jun 20, 2025
8bc70a4
Merge branch 'develop' into feat/llm-tool-improvement
perfectra1n Jun 20, 2025
4e36dc8
Merge branch 'develop' into feat/llm-tool-improvement
perfectra1n Jun 20, 2025
755b1ed
Merge branch 'main' into feat/llm-tool-improvement
perfectra1n Jun 26, 2025
27847ab
debug(llm): add some llm debug tools
perfectra1n Jun 30, 2025
c119ffe
Merge branch 'main' into feat/llm-tool-improvement
perfectra1n Jun 30, 2025
5562559
feat(llm): try to improve tool calling, part 4
perfectra1n Jul 4, 2025
6fbc5b2
feat(llm): implement error recovery stage and implement better tool c…
perfectra1n Jul 4, 2025
e0383c4
feat(llm): provide better user feedback when working
perfectra1n Jul 4, 2025
42ee351
Merge branch 'main' into feat/llm-tool-improvement
perfectra1n Jul 4, 2025
4c01d7d
fix(llm): resolve compilation issues due to additional stages
perfectra1n Jul 5, 2025
89fcfab
Merge remote-tracking branch 'origin/main' into feat/llm-tool-improve…
perfectra1n Aug 9, 2025
0d89838
feat(llm): try to stop some of the horrible memory management
perfectra1n Aug 9, 2025
3db145b
feat(llm): update pipeline steps
perfectra1n Aug 9, 2025
a1e596b
feat(llm): get rid of now unused files
perfectra1n Aug 9, 2025
97ec882
feat(llm): resolve compilation and typecheck errors
perfectra1n Aug 9, 2025
f89c202
feat(llm): add additional logic for tools
perfectra1n Aug 9, 2025
16622f4
feat(llm): implement circuitbreaker to prevent going haywire
perfectra1n Aug 9, 2025
d38ca72
feat(llm): remove overly complex circuit breaker
perfectra1n Aug 9, 2025
ac415c1
feat(llm): try to coerce the LLM some more for tool calling
perfectra1n Aug 9, 2025
b37d9b4
feat(llm): add smart search tool for unified search interface
perfectra1n Aug 9, 2025
8da904c
feat(llm): remove unified_search_tool.ts to eliminate duplicate searc…
perfectra1n Aug 9, 2025
2958ae4
feat(llm): implement Phase 2.3 Smart Parameter Processing with fuzzy …
perfectra1n Aug 9, 2025
cec627a
feat(llm): much better tool calling and tests
perfectra1n Aug 10, 2025
bb3d0f0
feat(llm): yeet a lot of unused tools
perfectra1n Aug 10, 2025
778f13e
feat(llm): add missing options interfaces for llm
perfectra1n Aug 10, 2025
eb2ace4
feat(llm): update llm tests for update tool executions
perfectra1n Aug 16, 2025
129 changes: 126 additions & 3 deletions apps/client/src/widgets/llm_chat/communication.ts
@@ -48,6 +48,9 @@ export async function checkSessionExists(noteId: string): Promise<boolean> {
* @param onContentUpdate - Callback for content updates
* @param onThinkingUpdate - Callback for thinking updates
* @param onToolExecution - Callback for tool execution
* @param onProgressUpdate - Callback for progress updates
* @param onUserInteraction - Callback for user interaction requests
* @param onErrorRecovery - Callback for error recovery options
* @param onComplete - Callback for completion
* @param onError - Callback for errors
*/
@@ -57,6 +60,9 @@ export async function setupStreamingResponse(
onContentUpdate: (content: string, isDone?: boolean) => void,
onThinkingUpdate: (thinking: string) => void,
onToolExecution: (toolData: any) => void,
onProgressUpdate: (progressData: any) => void,
onUserInteraction: (interactionData: any) => Promise<any>,
onErrorRecovery: (errorData: any) => Promise<any>,
onComplete: () => void,
onError: (error: Error) => void
): Promise<void> {
@@ -66,9 +72,14 @@ export async function setupStreamingResponse(
let timeoutId: number | null = null;
let initialTimeoutId: number | null = null;
let cleanupTimeoutId: number | null = null;
let heartbeatTimeoutId: number | null = null;
let receivedAnyMessage = false;
let eventListener: ((event: Event) => void) | null = null;
let lastMessageTimestamp = 0;

// Configuration for timeouts
const HEARTBEAT_TIMEOUT_MS = 30000; // 30 seconds between messages
const MAX_IDLE_TIME_MS = 60000; // 60 seconds max idle time

// Create a unique identifier for this response process
const responseId = `llm-stream-${Date.now()}-${Math.floor(Math.random() * 1000)}`;
@@ -101,12 +112,43 @@ export async function setupStreamingResponse(
}
})();

// Function to reset heartbeat timeout
const resetHeartbeatTimeout = () => {
if (heartbeatTimeoutId) {
window.clearTimeout(heartbeatTimeoutId);
}

heartbeatTimeoutId = window.setTimeout(() => {
const idleTime = Date.now() - lastMessageTimestamp;
console.warn(`[${responseId}] No message received for ${idleTime}ms`);

if (idleTime > MAX_IDLE_TIME_MS) {
console.error(`[${responseId}] Connection appears to be stalled (idle for ${idleTime}ms)`);
performCleanup();
reject(new Error('Connection lost: The AI service stopped responding. Please try again.'));
} else {
// Send a warning but continue waiting
console.warn(`[${responseId}] Connection may be slow, continuing to wait...`);
resetHeartbeatTimeout(); // Reset for another check
}
}, HEARTBEAT_TIMEOUT_MS);
};

// Function to safely perform cleanup
const performCleanup = () => {
// Clear all timeouts
if (cleanupTimeoutId) {
window.clearTimeout(cleanupTimeoutId);
cleanupTimeoutId = null;
}
if (heartbeatTimeoutId) {
window.clearTimeout(heartbeatTimeoutId);
heartbeatTimeoutId = null;
}
if (initialTimeoutId) {
window.clearTimeout(initialTimeoutId);
initialTimeoutId = null;
}

console.log(`[${responseId}] Performing final cleanup of event listener`);
cleanupEventListener(eventListener);
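
> **Reviewer note.** The heartbeat logic above is a resettable watchdog: every incoming message re-arms the timer, and on expiry the handler distinguishes a slow-but-alive connection (re-arm and keep waiting) from a stalled one (clean up and reject). A minimal standalone sketch of that pattern — the `Watchdog`/`onStall` names are illustrative, not part of this PR:

```ts
// Sketch only: a resettable watchdog timer (browser environment).
class Watchdog {
    private timeoutId: number | null = null;
    private lastActivity = Date.now();

    constructor(
        private readonly checkIntervalMs: number,   // e.g. HEARTBEAT_TIMEOUT_MS
        private readonly maxIdleMs: number,         // e.g. MAX_IDLE_TIME_MS
        private readonly onStall: (idleMs: number) => void
    ) {}

    // Call on every incoming message: records activity and re-arms the timer.
    reset(): void {
        this.lastActivity = Date.now();
        this.arm();
    }

    // Call from cleanup paths so no timer outlives the stream.
    stop(): void {
        if (this.timeoutId !== null) {
            window.clearTimeout(this.timeoutId);
            this.timeoutId = null;
        }
    }

    private arm(): void {
        this.stop();
        this.timeoutId = window.setTimeout(() => {
            const idle = Date.now() - this.lastActivity;
            if (idle > this.maxIdleMs) {
                this.onStall(idle); // stalled: caller cleans up and rejects
            } else {
                this.arm();         // slow but alive: keep waiting
            }
        }, this.checkIntervalMs);
    }
}
```
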
@@ -115,13 +157,15 @@ export async function setupStreamingResponse(
};

// Set initial timeout to catch cases where no message is received at all
// Increased timeout and better error messaging
const INITIAL_TIMEOUT_MS = 15000; // 15 seconds for initial response
initialTimeoutId = window.setTimeout(() => {
if (!receivedAnyMessage) {
-console.error(`[${responseId}] No initial message received within timeout`);
+console.error(`[${responseId}] No initial message received within ${INITIAL_TIMEOUT_MS}ms timeout`);
performCleanup();
-reject(new Error('No response received from server'));
+reject(new Error('Connection timeout: The AI service is taking longer than expected to respond. Please check your connection and try again.'));
}
-}, 10000);
+}, INITIAL_TIMEOUT_MS);

// Create a message handler for CustomEvents
eventListener = (event: Event) => {
@@ -155,6 +199,12 @@ export async function setupStreamingResponse(
window.clearTimeout(initialTimeoutId);
initialTimeoutId = null;
}

// Start heartbeat monitoring
resetHeartbeatTimeout();
} else {
// Reset heartbeat on each new message
resetHeartbeatTimeout();
}

// Handle error
Expand All @@ -177,6 +227,28 @@ export async function setupStreamingResponse(
onToolExecution(message.toolExecution);
}

// Handle progress updates
if (message.progressUpdate) {
console.log(`[${responseId}] Progress update:`, message.progressUpdate);
onProgressUpdate(message.progressUpdate);
}

// Handle user interaction requests
if (message.userInteraction) {
console.log(`[${responseId}] User interaction request:`, message.userInteraction);
onUserInteraction(message.userInteraction).catch(error => {
console.error(`[${responseId}] Error handling user interaction:`, error);
});
}

// Handle error recovery options
if (message.errorRecovery) {
console.log(`[${responseId}] Error recovery options:`, message.errorRecovery);
onErrorRecovery(message.errorRecovery).catch(error => {
console.error(`[${responseId}] Error handling error recovery:`, error);
});
}

// Handle content updates
if (message.content) {
// Simply append the new content - no complex deduplication
@@ -258,3 +330,54 @@ export async function getDirectResponse(noteId: string, messageParams: any): Pro
}
}

/**
* Send user interaction response
* @param interactionId - The interaction ID
* @param response - The user's response
*/
export async function sendUserInteractionResponse(interactionId: string, response: string): Promise<void> {
try {
await server.post<any>(`llm/interactions/${interactionId}/respond`, {
response: response
});
console.log(`User interaction response sent: ${interactionId} -> ${response}`);
} catch (error) {
console.error('Error sending user interaction response:', error);
throw error;
}
}

/**
* Send error recovery choice
* @param sessionId - The chat session ID
* @param errorId - The error ID
* @param action - The recovery action chosen
* @param parameters - Optional parameters for the action
*/
export async function sendErrorRecoveryChoice(sessionId: string, errorId: string, action: string, parameters?: any): Promise<void> {
try {
await server.post<any>(`llm/chat/${sessionId}/error/${errorId}/recover`, {
action: action,
parameters: parameters
});
console.log(`Error recovery choice sent: ${errorId} -> ${action}`);
} catch (error) {
console.error('Error sending error recovery choice:', error);
throw error;
}
}

/**
* Cancel ongoing operations
* @param sessionId - The chat session ID
*/
export async function cancelChatOperations(sessionId: string): Promise<void> {
try {
await server.post<any>(`llm/chat/${sessionId}/cancel`, {});
console.log(`Chat operations cancelled for session: ${sessionId}`);
} catch (error) {
console.error('Error cancelling chat operations:', error);
throw error;
}
}
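
> **Reviewer note.** A hypothetical end-to-end wiring of the new callbacks and endpoints, to show how the pieces are meant to fit together. None of this is code from the PR: the leading `(noteId, messageParams)` parameters are inferred from the JSDoc, the payload shapes (`interaction.id`, `interaction.question`, `errorData.id`) are guesses to be checked against the server, and `"retry"` is an illustrative recovery action.

```ts
import {
    setupStreamingResponse,
    sendUserInteractionResponse,
    sendErrorRecoveryChoice,
    cancelChatOperations
} from "./communication";

// Hypothetical caller; all handler bodies are placeholder stand-ins.
async function startChat(noteId: string, messageParams: any, sessionId: string): Promise<void> {
    await setupStreamingResponse(
        noteId,
        messageParams,
        (content, isDone) => console.log("content:", content, isDone), // onContentUpdate
        (thinking) => console.log("thinking:", thinking),              // onThinkingUpdate
        (toolData) => console.log("tool:", toolData),                  // onToolExecution
        (progress) => console.log("progress:", progress),              // onProgressUpdate
        async (interaction) => {                                       // onUserInteraction
            // Ask the user, then POST the answer back via the new endpoint.
            const answer = window.prompt(interaction.question) ?? "";
            await sendUserInteractionResponse(interaction.id, answer);
        },
        async (errorData) => {                                         // onErrorRecovery
            // "retry" is an assumed action name, not one defined by this PR.
            await sendErrorRecoveryChoice(sessionId, errorData.id, "retry");
        },
        () => console.log("complete"),                                 // onComplete
        (error) => {                                                   // onError
            console.error(error);
            void cancelChatOperations(sessionId);
        }
    );
}
```
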
