Commit a79b097

UN-2453 [FIX] Mistral AI LLM adapter test connection fix (#195)
* Fixed max_tokens in mistral.py
* Cleared unnecessary spaces
1 parent de5cafe commit a79b097

2 files changed: +2 −3 lines

src/unstract/sdk/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-__version__ = "v0.76.0"
+__version__ = "v0.76.1"
 
 
 def get_sdk_version() -> str:

src/unstract/sdk/adapters/llm/mistral/src/mistral.py

Lines changed: 1 addition & 2 deletions
@@ -5,7 +5,6 @@
 from llama_index.llms.mistralai import MistralAI
 from llama_index.llms.mistralai.base import DEFAULT_MISTRALAI_MAX_TOKENS
 from mistralai.models import SDKError as MistralError
-
 from unstract.sdk.adapters.exceptions import AdapterError
 from unstract.sdk.adapters.llm.constants import LLMKeys
 from unstract.sdk.adapters.llm.llm_adapter import LLMAdapter
@@ -52,7 +51,7 @@ def get_llm_instance(self) -> LLM:
             self.config.get(Constants.MAX_RETRIES, LLMKeys.DEFAULT_MAX_RETRIES)
         )
         max_tokens = int(
-            self.config.get(Constants.MAX_RETRIES, DEFAULT_MISTRALAI_MAX_TOKENS)
+            self.config.get(Constants.MAX_TOKENS, DEFAULT_MISTRALAI_MAX_TOKENS)
         )
         try:
             llm: LLM = MistralAI(
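
The one-line change reads max_tokens from the correct config key instead of reusing the retry key, so a configured token limit is no longer ignored. Below is a minimal, self-contained sketch of the corrected lookup, assuming a plain dict config; the key strings and default values are illustrative placeholders, not the SDK's real Constants, LLMKeys, or DEFAULT_MISTRALAI_MAX_TOKENS definitions.

# Sketch of the config resolution this commit fixes, with assumed placeholder
# keys and defaults (the real values live in the SDK and
# llama_index.llms.mistralai.base).
MAX_RETRIES = "max_retries"          # assumed key name
MAX_TOKENS = "max_tokens"            # assumed key name
DEFAULT_MAX_RETRIES = 3              # placeholder default
DEFAULT_MISTRALAI_MAX_TOKENS = 512   # placeholder default


def resolve_llm_kwargs(config: dict) -> dict:
    """Resolve retry and token limits the way the fixed adapter does."""
    max_retries = int(config.get(MAX_RETRIES, DEFAULT_MAX_RETRIES))
    # Before the fix, this lookup also used MAX_RETRIES, so any configured
    # max_tokens value was silently dropped in favor of the default.
    max_tokens = int(config.get(MAX_TOKENS, DEFAULT_MISTRALAI_MAX_TOKENS))
    return {"max_retries": max_retries, "max_tokens": max_tokens}


print(resolve_llm_kwargs({"max_tokens": 1024}))
# -> {'max_retries': 3, 'max_tokens': 1024}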
