diff --git a/api/config/embedder.json b/api/config/embedder.json
index a70bdb41b..df8ade904 100644
--- a/api/config/embedder.json
+++ b/api/config/embedder.json
@@ -8,6 +8,12 @@
       "encoding_format": "float"
     }
   },
+  "embedder_ollama": {
+    "client_class": "OllamaClient",
+    "model_kwargs": {
+      "model": "nomic-embed-text"
+    }
+  },
   "retriever": {
     "top_k": 20
   },
diff --git a/api/config/generator.json b/api/config/generator.json
index f88179098..90d398063 100644
--- a/api/config/generator.json
+++ b/api/config/generator.json
@@ -70,6 +70,9 @@
       },
       "o4-mini": {
         "temperature": 1.0
+      },
+      "claude-3.7-sonnet": {
+        "temperature": 1.0
       }
     }
   },
diff --git a/api/main.py b/api/main.py
index a19892617..37145a6b3 100644
--- a/api/main.py
+++ b/api/main.py
@@ -4,6 +4,8 @@ import logging
 
 from dotenv import load_dotenv
 
+sys.path.insert(0, os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")))
+
 # Load environment variables from .env file
 load_dotenv()
 
diff --git a/api/openai_client.py b/api/openai_client.py
index bc75ed586..86445fad1 100644
--- a/api/openai_client.py
+++ b/api/openai_client.py
@@ -178,7 +178,7 @@ def __init__(
         self._api_key = api_key
         self._env_api_key_name = env_api_key_name
         self._env_base_url_name = env_base_url_name
-        self.base_url = base_url or os.getenv(self._env_base_url_name, "https://api.openai.com/v1")
+        self.base_url = base_url or os.getenv(self._env_base_url_name, "http://10.239.142.219:8012/")
         self.sync_client = self.init_sync_client()
         self.async_client = None  # only initialize if the async call is called
         self.chat_completion_parser = (
diff --git a/api/websocket_wiki.py b/api/websocket_wiki.py
index 2a7cce9e3..3dd2488e5 100644
--- a/api/websocket_wiki.py
+++ b/api/websocket_wiki.py
@@ -459,8 +459,10 @@ async def handle_websocket_chat(websocket: WebSocket):
             model_kwargs = {
                 "model": request.model,
                 "stream": True,
-                "temperature": model_config["temperature"]
+                #"temperature": model_config["temperature"]
             }
+            if "temperature" in model_config:
+                model_kwargs["temperature"] = model_config["temperature"]
             # Only add top_p if it exists in the model config
             if "top_p" in model_config:
                 model_kwargs["top_p"] = model_config["top_p"]
diff --git a/src/components/Markdown.tsx b/src/components/Markdown.tsx
index 69dcfe8f7..c50326abf 100644
--- a/src/components/Markdown.tsx
+++ b/src/components/Markdown.tsx
@@ -3,7 +3,7 @@ import ReactMarkdown from 'react-markdown';
 import remarkGfm from 'remark-gfm';
 import rehypeRaw from 'rehype-raw';
 import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
-import { tomorrow } from 'react-syntax-highlighter/dist/cjs/styles/prism';
+import { tomorrow } from 'react-syntax-highlighter/dist/esm/styles/prism';
 import Mermaid from './Mermaid';
 
 interface MarkdownProps {
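
The new `embedder_ollama` block in `api/config/embedder.json` adds an Ollama-backed embedding option alongside the default OpenAI one. A minimal sketch of how such a block could be selected at load time is below; `load_embedder_config` and the `use_ollama` flag are illustrative assumptions, not functions or settings defined in this diff.

```python
# Sketch only: the repository's real config loader is not shown in this diff.
# "load_embedder_config" and the "use_ollama" switch are assumptions used to
# illustrate how the new "embedder_ollama" entry could be picked up.
import json

def load_embedder_config(path: str = "api/config/embedder.json",
                         use_ollama: bool = False) -> dict:
    with open(path, "r", encoding="utf-8") as f:
        config = json.load(f)
    # Prefer the Ollama block (OllamaClient / nomic-embed-text) when requested
    # and present; otherwise fall back to the default "embedder" block.
    key = "embedder_ollama" if use_ollama and "embedder_ollama" in config else "embedder"
    return config[key]

if __name__ == "__main__":
    print(load_embedder_config(use_ollama=True))  # e.g. {'client_class': 'OllamaClient', ...}
```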
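
The `api/websocket_wiki.py` hunk stops passing `temperature` unconditionally and only forwards it when the per-model entry in `generator.json` defines it (the new `claude-3.7-sonnet` entry, for example, sets only `temperature`). The sketch below restates that guarded-kwargs pattern in isolation; `build_model_kwargs` is a hypothetical helper written for illustration, not code from the repository.

```python
# Sketch of the guarded model_kwargs construction used in the websocket hunk.
# "build_model_kwargs" is a hypothetical helper, not part of the repository.
from typing import Any, Dict

def build_model_kwargs(model_name: str, model_config: Dict[str, Any]) -> Dict[str, Any]:
    # Required keys are always present; optional sampling keys are copied
    # only when the per-model config actually defines them.
    model_kwargs: Dict[str, Any] = {"model": model_name, "stream": True}
    for optional_key in ("temperature", "top_p"):
        if optional_key in model_config:
            model_kwargs[optional_key] = model_config[optional_key]
    return model_kwargs

if __name__ == "__main__":
    # The generator.json entry added in this diff defines only temperature.
    print(build_model_kwargs("claude-3.7-sonnet", {"temperature": 1.0}))
    # A model entry with no sampling overrides yields just the required keys.
    print(build_model_kwargs("some-model", {}))
```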