Skip to content

Commit 1ca7dd2

Browse files
authored
fix llamacloud api and markdown issue (#532)
1 parent 3d20990 commit 1ca7dd2

File tree

5 files changed

+461
-451
lines changed

5 files changed

+461
-451
lines changed

llama-index-server/llama_index/server/api/routers/chat.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -99,18 +99,15 @@ async def _text_stream(
9999
event: Union[AgentStream, StopEvent],
100100
) -> AsyncGenerator[str, None]:
101101
if isinstance(event, AgentStream):
102-
if event.delta.strip(): # Only yield non-empty deltas
103-
yield event.delta
102+
yield event.delta
104103
elif isinstance(event, StopEvent):
105104
if isinstance(event.result, str):
106105
yield event.result
107106
elif isinstance(event.result, AsyncGenerator):
108107
async for chunk in event.result:
109108
if isinstance(chunk, str):
110109
yield chunk
111-
elif (
112-
hasattr(chunk, "delta") and chunk.delta.strip()
113-
): # Only yield non-empty deltas
110+
elif hasattr(chunk, "delta") and chunk.delta:
114111
yield chunk.delta
115112

116113
stream_started = False

llama-index-server/llama_index/server/services/llamacloud/file.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ def add_file_to_pipeline(
7474
"custom_metadata": {"file_id": file_id, **(custom_metadata or {})},
7575
}
7676
]
77-
files = client.pipelines.add_files_to_pipeline(pipeline_id, request=files)
77+
files = client.pipelines.add_files_to_pipeline_api(pipeline_id, request=files)
7878

7979
if not wait_for_processing:
8080
return file_id

0 commit comments

Comments (0)