Skip to content

Commit 02d1966

Browse files
michaelchia, pre-commit-ci[bot], and dlqqq
authored
Support pending/loading message while waiting for response (#821)
* support pending message draft * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * styling + pending message for /fix * change default pending message * remove persona groups * inline styling * single timestamp * use message id as component key Co-authored-by: david qiu <[email protected]> * fix conditional useEffect * prefer MUI Typography in PendingMessageElement to match font size * merge 2 outer div elements into 1 --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: david qiu <[email protected]>
1 parent ff022fe commit 02d1966

File tree

9 files changed

+262
-34
lines changed

9 files changed

+262
-34
lines changed

packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -71,8 +71,9 @@ async def process_message(self, message: HumanChatMessage):
7171
self.get_llm_chain()
7272

7373
try:
74-
result = await self.llm_chain.acall({"question": query})
75-
response = result["answer"]
74+
with self.pending("Searching learned documents"):
75+
result = await self.llm_chain.acall({"question": query})
76+
response = result["answer"]
7677
self.reply(response, message)
7778
except AssertionError as e:
7879
self.log.error(e)

packages/jupyter-ai/jupyter_ai/chat_handlers/base.py

Lines changed: 59 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import argparse
2+
import contextlib
23
import os
34
import time
45
import traceback
@@ -17,7 +18,13 @@
1718

1819
from dask.distributed import Client as DaskClient
1920
from jupyter_ai.config_manager import ConfigManager, Logger
20-
from jupyter_ai.models import AgentChatMessage, ChatMessage, HumanChatMessage
21+
from jupyter_ai.models import (
22+
AgentChatMessage,
23+
ChatMessage,
24+
ClosePendingMessage,
25+
HumanChatMessage,
26+
PendingMessage,
27+
)
2128
from jupyter_ai_magics import Persona
2229
from jupyter_ai_magics.providers import BaseProvider
2330
from langchain.pydantic_v1 import BaseModel
@@ -193,6 +200,57 @@ def reply(self, response: str, human_msg: Optional[HumanChatMessage] = None):
193200
handler.broadcast_message(agent_msg)
194201
break
195202

203+
def start_pending(self, text: str, ellipsis: bool = True) -> PendingMessage:
    """
    Broadcast a pending ("working…") message to all connected clients.

    :param text: body of the pending message shown to the user.
    :param ellipsis: whether the frontend should animate a trailing
        ellipsis after ``text``.
    :returns: the ``PendingMessage`` that was sent, so the caller can
        later pass it to ``close_pending()``.

    FIX: the return annotation previously read ``-> str`` and the
    docstring claimed the pending message *ID* was returned, but the
    method returns the whole ``PendingMessage`` object (and
    ``close_pending()`` expects exactly that).
    """
    persona = self.config_manager.persona

    pending_msg = PendingMessage(
        id=uuid4().hex,
        time=time.time(),
        body=text,
        persona=Persona(name=persona.name, avatar_route=persona.avatar_route),
        ellipsis=ellipsis,
    )

    # A single root chat handler broadcast reaches every connected
    # websocket client, so stop after the first live handler.
    for handler in self._root_chat_handlers.values():
        if not handler:
            continue

        handler.broadcast_message(pending_msg)
        break
    return pending_msg
226+
227+
def close_pending(self, pending_msg: PendingMessage):
228+
"""
229+
Closes a pending message.
230+
"""
231+
close_pending_msg = ClosePendingMessage(
232+
id=pending_msg.id,
233+
)
234+
235+
for handler in self._root_chat_handlers.values():
236+
if not handler:
237+
continue
238+
239+
handler.broadcast_message(close_pending_msg)
240+
break
241+
242+
@contextlib.contextmanager
def pending(self, text: str, ellipsis: bool = True):
    """
    Context manager wrapping a unit of work in a pending indicator.

    Broadcasts a pending message on entry and guarantees it is closed on
    exit, even when the wrapped block raises.
    """
    msg = self.start_pending(text, ellipsis=ellipsis)
    try:
        yield
    finally:
        # Always dismiss the indicator, including on error paths.
        self.close_pending(msg)
253+
196254
def get_llm_chain(self):
197255
lm_provider = self.config_manager.lm_provider
198256
lm_provider_params = self.config_manager.lm_provider_params

packages/jupyter-ai/jupyter_ai/chat_handlers/default.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,5 +45,8 @@ def create_llm_chain(
4545

4646
async def process_message(self, message: HumanChatMessage):
    """
    Respond to a human chat message: ensure the LLM chain exists, show a
    pending indicator while the model generates, then reply.
    """
    self.get_llm_chain()
    # Broadcast a transient "pending" message for the duration of the
    # prediction; the context manager closes it even if `apredict` raises.
    with self.pending("Generating response"):
        response = await self.llm_chain.apredict(
            input=message.body, stop=["\nHuman:"]
        )
    self.reply(response, message)

packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -92,12 +92,13 @@ async def process_message(self, message: HumanChatMessage):
9292
extra_instructions = message.body[4:].strip() or "None."
9393

9494
self.get_llm_chain()
95-
response = await self.llm_chain.apredict(
96-
extra_instructions=extra_instructions,
97-
stop=["\nHuman:"],
98-
cell_content=selection.source,
99-
error_name=selection.error.name,
100-
error_value=selection.error.value,
101-
traceback="\n".join(selection.error.traceback),
102-
)
95+
with self.pending("Analyzing error"):
96+
response = await self.llm_chain.apredict(
97+
extra_instructions=extra_instructions,
98+
stop=["\nHuman:"],
99+
cell_content=selection.source,
100+
error_name=selection.error.name,
101+
error_value=selection.error.value,
102+
traceback="\n".join(selection.error.traceback),
103+
)
103104
self.reply(response, message)

packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py

Lines changed: 11 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -151,19 +151,17 @@ async def process_message(self, message: HumanChatMessage):
151151
# delete and relearn index if embedding model was changed
152152
await self.delete_and_relearn()
153153

154-
if args.verbose:
155-
self.reply(f"Loading and splitting files for {load_path}", message)
156-
157-
try:
158-
await self.learn_dir(
159-
load_path, args.chunk_size, args.chunk_overlap, args.all_files
160-
)
161-
except Exception as e:
162-
response = f"""Learn documents in **{load_path}** failed. {str(e)}."""
163-
else:
164-
self.save()
165-
response = f"""🎉 I have learned documents at **{load_path}** and I am ready to answer questions about them.
166-
You can ask questions about these docs by prefixing your message with **/ask**."""
154+
with self.pending(f"Loading and splitting files for {load_path}"):
155+
try:
156+
await self.learn_dir(
157+
load_path, args.chunk_size, args.chunk_overlap, args.all_files
158+
)
159+
except Exception as e:
160+
response = f"""Learn documents in **{load_path}** failed. {str(e)}."""
161+
else:
162+
self.save()
163+
response = f"""🎉 I have learned documents at **{load_path}** and I am ready to answer questions about them.
164+
You can ask questions about these docs by prefixing your message with **/ask**."""
167165
self.reply(response, message)
168166

169167
def _build_list_response(self):

packages/jupyter-ai/jupyter_ai/models.py

Lines changed: 22 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,13 +87,34 @@ class ClearMessage(BaseModel):
8787
type: Literal["clear"] = "clear"
8888

8989

90+
class PendingMessage(BaseModel):
    # Transient server->client message: show a pending/loading indicator.
    type: Literal["pending"] = "pending"
    # unique ID; ClosePendingMessage references it to dismiss the indicator
    id: str
    # Unix timestamp in seconds (produced by time.time() on the server)
    time: float
    # text shown to the user, e.g. "Generating response"
    body: str
    # persona (name + avatar route) rendered in the message header
    persona: Persona
    # whether the frontend animates a trailing ellipsis after `body`
    ellipsis: bool = True
97+
98+
99+
class ClosePendingMessage(BaseModel):
    """Instructs clients to dismiss the pending indicator whose ``id`` matches."""

    # FIX: the literal must admit the default value. It previously read
    # Literal["pending"] with default "close-pending", which is invalid and
    # also collides with PendingMessage's `type` discriminator.
    type: Literal["close-pending"] = "close-pending"
    # ID of the PendingMessage being closed
    id: str
102+
103+
90104
# the type of messages being broadcast to clients
ChatMessage = Union[
    AgentChatMessage,
    HumanChatMessage,
]

# every payload type a client may receive over the websocket, including
# transient control messages (connection handshake, clear, and the
# pending-indicator open/close pair)
Message = Union[
    AgentChatMessage,
    HumanChatMessage,
    ConnectionMessage,
    ClearMessage,
    PendingMessage,
    ClosePendingMessage,
]
97118

98119

99120
class ChatHistory(BaseModel):

packages/jupyter-ai/src/components/chat.tsx

Lines changed: 22 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ import { IRenderMimeRegistry } from '@jupyterlab/rendermime';
99

1010
import { JlThemeProvider } from './jl-theme-provider';
1111
import { ChatMessages } from './chat-messages';
12+
import { PendingMessages } from './pending-messages';
1213
import { ChatInput } from './chat-input';
1314
import { ChatSettings } from './chat-settings';
1415
import { AiService } from '../handler';
@@ -38,6 +39,9 @@ function ChatBody({
3839
rmRegistry: renderMimeRegistry
3940
}: ChatBodyProps): JSX.Element {
4041
const [messages, setMessages] = useState<AiService.ChatMessage[]>([]);
42+
const [pendingMessages, setPendingMessages] = useState<
43+
AiService.PendingMessage[]
44+
>([]);
4145
const [showWelcomeMessage, setShowWelcomeMessage] = useState<boolean>(false);
4246
const [includeSelection, setIncludeSelection] = useState(true);
4347
const [replaceSelection, setReplaceSelection] = useState(false);
@@ -73,14 +77,24 @@ function ChatBody({
7377
*/
7478
useEffect(() => {
7579
function handleChatEvents(message: AiService.Message) {
76-
if (message.type === 'connection') {
77-
return;
78-
} else if (message.type === 'clear') {
79-
setMessages([]);
80-
return;
80+
switch (message.type) {
81+
case 'connection':
82+
return;
83+
case 'clear':
84+
setMessages([]);
85+
return;
86+
case 'pending':
87+
setPendingMessages(pendingMessages => [...pendingMessages, message]);
88+
return;
89+
case 'close-pending':
90+
setPendingMessages(pendingMessages =>
91+
pendingMessages.filter(p => p.id !== message.id)
92+
);
93+
return;
94+
default:
95+
setMessages(messageGroups => [...messageGroups, message]);
96+
return;
8197
}
82-
83-
setMessages(messageGroups => [...messageGroups, message]);
8498
}
8599

86100
chatHandler.addListener(handleChatEvents);
@@ -157,6 +171,7 @@ function ChatBody({
157171
<>
158172
<ScrollContainer sx={{ flexGrow: 1 }}>
159173
<ChatMessages messages={messages} rmRegistry={renderMimeRegistry} />
174+
<PendingMessages messages={pendingMessages} />
160175
</ScrollContainer>
161176
<ChatInput
162177
value={input}
Lines changed: 115 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,115 @@
1+
import React, { useState, useEffect } from 'react';
2+
3+
import { Box, Typography } from '@mui/material';
4+
import { AiService } from '../handler';
5+
import { ChatMessageHeader } from './chat-messages';
6+
7+
type PendingMessagesProps = {
8+
messages: AiService.PendingMessage[];
9+
};
10+
11+
type PendingMessageElementProps = {
12+
text: string;
13+
ellipsis: boolean;
14+
};
15+
16+
function PendingMessageElement(props: PendingMessageElementProps): JSX.Element {
17+
const [dots, setDots] = useState('');
18+
19+
useEffect(() => {
20+
const interval = setInterval(() => {
21+
setDots(dots => (dots.length < 3 ? dots + '.' : ''));
22+
}, 500);
23+
24+
return () => clearInterval(interval);
25+
}, []);
26+
27+
let text = props.text;
28+
if (props.ellipsis) {
29+
text = props.text + dots;
30+
}
31+
32+
return (
33+
<Box>
34+
{text.split('\n').map((line, index) => (
35+
<Typography key={index} sx={{ lineHeight: 0.6 }}>
36+
{line}
37+
</Typography>
38+
))}
39+
</Box>
40+
);
41+
}
42+
43+
export function PendingMessages(
  props: PendingMessagesProps
): JSX.Element | null {
  const [timestamp, setTimestamp] = useState<string>('');
  const [agentMessage, setAgentMessage] =
    useState<AiService.AgentChatMessage | null>(null);

  useEffect(() => {
    // No pending messages: clear the synthetic header state and bail.
    if (!props.messages.length) {
      setAgentMessage(null);
      setTimestamp('');
      return;
    }

    // The newest pending message drives the shared header (persona + time).
    const latest = props.messages[props.messages.length - 1];
    setAgentMessage({
      type: 'agent',
      id: latest.id,
      time: latest.time,
      body: '',
      reply_to: '',
      persona: latest.persona
    });

    // timestamp format copied from ChatMessage
    setTimestamp(
      new Date(latest.time * 1000).toLocaleTimeString([], {
        hour: 'numeric',
        minute: '2-digit'
      })
    );
  }, [props.messages]);

  if (!agentMessage) {
    return null;
  }

  return (
    <Box
      sx={{
        padding: 4,
        borderTop: '1px solid var(--jp-border-color2)'
      }}
    >
      <ChatMessageHeader
        message={agentMessage}
        timestamp={timestamp}
        sx={{ marginBottom: 4 }}
      />
      <Box
        sx={{
          marginBottom: 1,
          paddingRight: 0,
          color: 'var(--jp-ui-font-color2)',
          '& > :not(:last-child)': { marginBottom: '2em' }
        }}
      >
        {/* one element per pending message, keyed by its server-side ID */}
        {props.messages.map(message => (
          <PendingMessageElement
            key={message.id}
            text={message.body}
            ellipsis={message.ellipsis}
          />
        ))}
      </Box>
    </Box>
  );
}
115+
}

packages/jupyter-ai/src/handler.ts

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -115,12 +115,28 @@ export namespace AiService {
115115
type: 'clear';
116116
};
117117

118+
// Transient server message: show a pending/loading indicator in the chat.
export type PendingMessage = {
  type: 'pending';
  id: string;
  // Unix timestamp in seconds (server-side time.time(); multiply by 1000 for Date)
  time: number;
  body: string;
  persona: Persona;
  // whether the UI animates a trailing ellipsis after `body`
  ellipsis: boolean;
};

// Dismisses the pending indicator whose `id` matches.
export type ClosePendingMessage = {
  type: 'close-pending';
  id: string;
};
131+
118132
export type ChatMessage = AgentChatMessage | HumanChatMessage;
119133
export type Message =
120134
| AgentChatMessage
121135
| HumanChatMessage
122136
| ConnectionMessage
123-
| ClearMessage;
137+
| ClearMessage
138+
| PendingMessage
139+
| ClosePendingMessage;
124140

125141
export type ChatHistory = {
126142
messages: ChatMessage[];

0 commit comments

Comments
 (0)