-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathchatbot_service.py
More file actions
110 lines (88 loc) · 3.98 KB
/
chatbot_service.py
File metadata and controls
110 lines (88 loc) · 3.98 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
"""
LangGraph chatbot service wrapper
"""
import logging
import os
from typing import List, Dict, Any
from typing import Annotated, TypedDict

from langchain_openai import ChatOpenAI
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages

from models import ChatMessage, MessageRole
class ChatBotState(TypedDict):
    # Conversation state for the LangGraph workflow. The add_messages
    # reducer makes LangGraph APPEND messages returned by a node to this
    # list instead of overwriting it.
    messages: Annotated[list, add_messages]
class ChatbotService:
    """Chat service that wraps an OpenAI chat model in a LangGraph workflow.

    The graph is a single node (START -> "Chat" -> END) whose node invokes
    the LLM on the accumulated message history.
    """

    def __init__(self, openai_api_key: str, model: str = "gpt-4o-mini", temperature: float = 0):
        """Initialize the chatbot service with OpenAI configuration.

        Args:
            openai_api_key: API key passed through to ChatOpenAI.
            model: OpenAI model name (defaults to "gpt-4o-mini").
            temperature: Sampling temperature for the model.
        """
        self.llm_openai = ChatOpenAI(
            model=model,
            temperature=temperature,
            api_key=openai_api_key
        )
        # Build and compile the LangGraph workflow once at construction time;
        # self.chatbot is the runnable used for every request.
        self.app = self._create_workflow()
        self.chatbot = self.app.compile()

    def _create_workflow(self) -> StateGraph:
        """Create the single-node LangGraph workflow (START -> Chat -> END)."""
        def chat(state: ChatBotState) -> ChatBotState:
            # Invoke the LLM on the full message history. Returning the
            # response in a one-element list lets the add_messages reducer
            # append it to the state rather than replace the history.
            response = self.llm_openai.invoke(state["messages"])
            return {"messages": [response]}

        workflow = StateGraph(ChatBotState)
        workflow.add_node("Chat", chat)
        workflow.add_edge(START, "Chat")
        workflow.add_edge("Chat", END)
        return workflow

    def _convert_messages_to_langchain_format(self, messages: List[ChatMessage]) -> List[Dict[str, Any]]:
        """Convert ChatMessage objects to LangChain role/content dicts.

        USER maps to role "human" and ASSISTANT to role "ai".

        NOTE(review): messages with any other role (e.g. a SYSTEM member,
        if MessageRole defines one) are silently dropped here — confirm
        that is intended.
        """
        langchain_messages = []
        for message in messages:
            if message.role == MessageRole.USER:
                langchain_messages.append({"role": "human", "content": message.content})
            elif message.role == MessageRole.ASSISTANT:
                langchain_messages.append({"role": "ai", "content": message.content})
        return langchain_messages

    def generate_response(self, messages: List[ChatMessage]) -> str:
        """Generate a response from the chatbot given conversation history.

        Args:
            messages: ChatMessage objects representing the conversation so far.

        Returns:
            The generated response string.

        Raises:
            RuntimeError: If the LangGraph invocation or the OpenAI call
                fails. The original exception is chained as __cause__.
                (RuntimeError subclasses Exception, so existing callers
                catching Exception are unaffected.)
        """
        try:
            langchain_messages = self._convert_messages_to_langchain_format(messages)
            # An empty history would give the model nothing to respond to,
            # so seed it with a default greeting.
            if not langchain_messages:
                langchain_messages = [{"role": "human", "content": "Hello"}]
            result = self.chatbot.invoke({"messages": langchain_messages})
            response_messages = result.get("messages", [])
            if response_messages:
                # The last message in the state is the model's reply; it is
                # normally a LangChain message object with a .content field.
                last_message = response_messages[-1]
                if hasattr(last_message, 'content'):
                    return last_message.content
                return str(last_message)
            return "I'm sorry, I couldn't generate a response. Please try again."
        except Exception as e:
            # logger.exception records the full traceback; chaining with
            # "from e" preserves the root cause for callers (PEP 3134).
            logging.getLogger(__name__).exception("Error generating response")
            raise RuntimeError(f"Failed to generate response: {str(e)}") from e

    def health_check(self) -> bool:
        """Perform a basic health check of the chatbot service.

        Sends a one-message test conversation through generate_response.

        Returns:
            True if a non-empty response is produced, False otherwise.
        """
        try:
            test_messages = [ChatMessage(role=MessageRole.USER, content="Hello")]
            response = self.generate_response(test_messages)
            return len(response) > 0
        except Exception:
            # Any failure (API error, misconfiguration) means "unhealthy";
            # deliberately swallowed so a health probe never raises.
            return False