
Commit 13a38c3

Fixing not using set_llm_session_ids from fh-llm-client (#792)
1 parent 2d1f7ca commit 13a38c3

File tree: 3 files changed, +8 −28 lines

  paperqa/docs.py
  paperqa/types.py
  tests/test_paperqa.py

paperqa/docs.py

Lines changed: 2 additions & 8 deletions
@@ -20,6 +20,7 @@
     LLMModel,
     LLMResult,
 )
+from llmclient.types import set_llm_session_ids
 from pydantic import (
     BaseModel,
     ConfigDict,
@@ -39,14 +40,7 @@
 from paperqa.prompts import CANNOT_ANSWER_PHRASE
 from paperqa.readers import read_doc
 from paperqa.settings import MaybeSettings, get_settings
-from paperqa.types import (
-    Doc,
-    DocDetails,
-    DocKey,
-    PQASession,
-    Text,
-    set_llm_session_ids,
-)
+from paperqa.types import Doc, DocDetails, DocKey, PQASession, Text
 from paperqa.utils import (
     gather_with_concurrency,
     get_loop,
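
For context, a minimal usage sketch of the helper now imported from fh-llm-client, assuming llmclient.types.set_llm_session_ids keeps the same context-manager signature as the implementation removed from paperqa/types.py below:

    from uuid import uuid4

    from llmclient.types import set_llm_session_ids

    with set_llm_session_ids(uuid4()):
        # LLM calls made inside this block can read the ambient session id from
        # the context variable, so their results can be grouped per query session.
        ...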

paperqa/types.py

Lines changed: 0 additions & 14 deletions
@@ -1,12 +1,10 @@
 from __future__ import annotations
 
-import contextvars
 import logging
 import os
 import re
 import warnings
 from collections.abc import Collection
-from contextlib import contextmanager
 from datetime import datetime
 from typing import Any, ClassVar, cast
 from uuid import UUID, uuid4
@@ -40,18 +38,6 @@
 DocKey = Any
 logger = logging.getLogger(__name__)
 
-# A context var that will be unique to threads/processes
-cvar_session_id = contextvars.ContextVar[UUID | None]("session_id", default=None)
-
-
-@contextmanager
-def set_llm_session_ids(session_id: UUID):
-    token = cvar_session_id.set(session_id)
-    try:
-        yield
-    finally:
-        cvar_session_id.reset(token)
-
 
 class Doc(Embeddable):
     docname: str
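
For reference, a minimal standalone sketch of the context-variable pattern that moved into fh-llm-client: each asyncio task (or thread) gets its own copy of the context, so concurrent queries keep distinct session ids. The cvar_session_id and set_llm_session_ids names mirror the removed code; the asyncio driver around them is illustrative only.

    import asyncio
    import contextvars
    from contextlib import contextmanager
    from uuid import UUID, uuid4

    # Mirrors the removed paperqa implementation (now provided by fh-llm-client).
    cvar_session_id = contextvars.ContextVar[UUID | None]("session_id", default=None)


    @contextmanager
    def set_llm_session_ids(session_id: UUID):
        token = cvar_session_id.set(session_id)
        try:
            yield
        finally:
            cvar_session_id.reset(token)


    async def fake_llm_call() -> UUID | None:
        # A downstream call reads the ambient session id instead of taking it
        # as an explicit argument.
        await asyncio.sleep(0)
        return cvar_session_id.get()


    async def run_one_query() -> UUID | None:
        with set_llm_session_ids(uuid4()):
            return await fake_llm_call()


    async def main() -> None:
        # Each task copies the context at creation time, so concurrent
        # "queries" observe different session ids.
        first, second = await asyncio.gather(run_one_query(), run_one_query())
        assert first is not None and first != second


    asyncio.run(main())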

tests/test_paperqa.py

Lines changed: 6 additions & 6 deletions
@@ -19,6 +19,7 @@
     HybridEmbeddingModel,
     LiteLLMEmbeddingModel,
     LLMModel,
+    LLMResult,
     SparseEmbeddingModel,
 )
 from pytest_subtests import SubTests
@@ -583,20 +584,19 @@ def test_query(docs_fixture) -> None:
     docs_fixture.query("Is XAI usable in chemistry?")
 
 
-def test_llmresult_callback(docs_fixture) -> None:
-    my_results = []
-
-    async def my_callback(result) -> None:
-        my_results.append(result)
+def test_llmresult_callback(docs_fixture: Docs) -> None:
+    my_results: list[LLMResult] = []
 
     settings = Settings.from_name("fast")
     summary_llm = settings.get_summary_llm()
-    summary_llm.llm_result_callback = my_callback
+    summary_llm.llm_result_callback = my_results.append
     docs_fixture.get_evidence(
         "What is XAI?", settings=settings, summary_llm_model=summary_llm
     )
     assert my_results
+    assert len(my_results) >= 1, "Expected the callback to append results"
     assert my_results[0].name
+    assert my_results[0].session_id
 
 
 def test_duplicate(stub_data_dir: Path) -> None:
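
The updated test assigns the list's bound append method directly as the result callback and checks that each captured LLMResult carries a session_id. A sketch of the same pattern outside pytest, reusing the Settings / get_summary_llm / get_evidence calls shown above; the Docs instance, the question text, and the llmclient import location are assumptions:

    from llmclient import LLMResult  # assumed import location, mirroring the test imports

    from paperqa import Docs, Settings

    my_results: list[LLMResult] = []

    settings = Settings.from_name("fast")
    summary_llm = settings.get_summary_llm()
    # A bound list.append is a plain one-argument callable, so it can replace
    # the async wrapper the old test used.
    summary_llm.llm_result_callback = my_results.append

    docs = Docs()  # placeholder; real usage would add papers via docs.add(...) first
    docs.get_evidence("What is XAI?", settings=settings, summary_llm_model=summary_llm)

    for result in my_results:
        print(result.name, result.session_id)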
