Commit e6ec9e9

Migrate to ChatOllama base class in Ollama provider (#1015)
* Added separate `ollama` provider

  Created a separate file `ollama.py` as a standalone provider and refactored other code accordingly. Also changed the `Ollama` class to `ChatOllama` so that it can support binding tools to the LLM. Updated the imports to come from `langchain_ollama` instead of `langchain_community`.

  Tested on several Ollama models, both LLMs and embedding models: `mxbai-embed-large`, `nomic-embed-text`, `ima/deepseek-math`, `mathstral`, `qwen2-math`, `snowflake-arctic-embed`, `mistral`, `llama3.1`, `starcoder2:15b-instruct`.

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  For more information, see https://pre-commit.ci

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 0884211 commit e6ec9e9
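The practical reason for moving from `langchain_community.llms.Ollama` to `langchain_ollama.ChatOllama` is that the latter is a chat model and therefore supports binding tools, which the old completion-style class does not. A minimal sketch of what that enables, assuming `langchain-ollama` is installed and a tool-capable model such as `llama3.1` has been pulled into a running Ollama server (the model name and tool are illustrative and not part of this commit):

```python
# Sketch only: requires `pip install langchain-ollama` and a local Ollama server.
from langchain_core.tools import tool
from langchain_ollama import ChatOllama


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


# ChatOllama is a chat model, so bind_tools() is available; the old
# langchain_community.llms.Ollama completion class has no such method.
llm = ChatOllama(model="llama3.1")  # model name is illustrative
llm_with_tools = llm.bind_tools([add])

response = llm_with_tools.invoke("What is 2 + 3? Use the add tool.")
print(response.tool_calls)  # tool calls proposed by the model, if any
```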

5 files changed (+36, −40 lines)

packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py

Lines changed: 0 additions & 2 deletions
@@ -5,7 +5,6 @@
     BaseEmbeddingsProvider,
     GPT4AllEmbeddingsProvider,
     HfHubEmbeddingsProvider,
-    OllamaEmbeddingsProvider,
     QianfanEmbeddingsEndpointProvider,
 )
 from .exception import store_exception
@@ -21,7 +20,6 @@
     BaseProvider,
     GPT4AllProvider,
     HfHubProvider,
-    OllamaProvider,
     QianfanProvider,
     TogetherAIProvider,
 )

packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py

Lines changed: 0 additions & 14 deletions
@@ -10,7 +10,6 @@
 from langchain_community.embeddings import (
     GPT4AllEmbeddings,
     HuggingFaceHubEmbeddings,
-    OllamaEmbeddings,
     QianfanEmbeddingsEndpoint,
 )

@@ -65,19 +64,6 @@ def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs, **model_kwargs)


-class OllamaEmbeddingsProvider(BaseEmbeddingsProvider, OllamaEmbeddings):
-    id = "ollama"
-    name = "Ollama"
-    # source: https://ollama.com/library
-    models = [
-        "nomic-embed-text",
-        "mxbai-embed-large",
-        "all-minilm",
-        "snowflake-arctic-embed",
-    ]
-    model_id_key = "model"
-
-
 class HfHubEmbeddingsProvider(BaseEmbeddingsProvider, HuggingFaceHubEmbeddings):
     id = "huggingface_hub"
     name = "Hugging Face Hub"
packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/ollama.py

Lines changed: 32 additions & 0 deletions

@@ -0,0 +1,32 @@
+from langchain_ollama import ChatOllama, OllamaEmbeddings
+
+from ..embedding_providers import BaseEmbeddingsProvider
+from ..providers import BaseProvider, EnvAuthStrategy, TextField
+
+
+class OllamaProvider(BaseProvider, ChatOllama):
+    id = "ollama"
+    name = "Ollama"
+    model_id_key = "model"
+    help = (
+        "See [https://www.ollama.com/library](https://www.ollama.com/library) for a list of models. "
+        "Pass a model's name; for example, `deepseek-coder-v2`."
+    )
+    models = ["*"]
+    registry = True
+    fields = [
+        TextField(key="base_url", label="Base API URL (optional)", format="text"),
+    ]
+
+
+class OllamaEmbeddingsProvider(BaseEmbeddingsProvider, OllamaEmbeddings):
+    id = "ollama"
+    name = "Ollama"
+    # source: https://ollama.com/library
+    models = [
+        "nomic-embed-text",
+        "mxbai-embed-large",
+        "all-minilm",
+        "snowflake-arctic-embed",
+    ]
+    model_id_key = "model"
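For reference, the `OllamaEmbeddings` class that the relocated `OllamaEmbeddingsProvider` now inherits from comes from `langchain_ollama` rather than `langchain_community`. A minimal usage sketch, assuming a local Ollama server with `nomic-embed-text` already pulled (the query text is illustrative):

```python
# Sketch only: `pip install langchain-ollama` and `ollama pull nomic-embed-text` first.
from langchain_ollama import OllamaEmbeddings

embeddings = OllamaEmbeddings(
    model="nomic-embed-text",           # one of the models listed by the provider
    base_url="http://localhost:11434",  # default Ollama endpoint; optional
)

vector = embeddings.embed_query("Jupyter AI with local models")
print(len(vector))  # dimensionality of the embedding vector
```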

packages/jupyter-ai-magics/jupyter_ai_magics/providers.py

Lines changed: 1 addition & 22 deletions
@@ -29,13 +29,7 @@
 from langchain.schema.output_parser import StrOutputParser
 from langchain.schema.runnable import Runnable
 from langchain_community.chat_models import QianfanChatEndpoint
-from langchain_community.llms import (
-    AI21,
-    GPT4All,
-    HuggingFaceEndpoint,
-    Ollama,
-    Together,
-)
+from langchain_community.llms import AI21, GPT4All, HuggingFaceEndpoint, Together
 from langchain_core.language_models.chat_models import BaseChatModel
 from langchain_core.language_models.llms import BaseLLM

@@ -707,21 +701,6 @@ async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]:
         return await self._call_in_executor(*args, **kwargs)


-class OllamaProvider(BaseProvider, Ollama):
-    id = "ollama"
-    name = "Ollama"
-    model_id_key = "model"
-    help = (
-        "See [https://www.ollama.com/library](https://www.ollama.com/library) for a list of models. "
-        "Pass a model's name; for example, `deepseek-coder-v2`."
-    )
-    models = ["*"]
-    registry = True
-    fields = [
-        TextField(key="base_url", label="Base API URL (optional)", format="text"),
-    ]
-
-
 class TogetherAIProvider(BaseProvider, Together):
     id = "togetherai"
     name = "Together AI"

packages/jupyter-ai-magics/pyproject.toml

Lines changed: 3 additions & 2 deletions
@@ -48,6 +48,7 @@ all = [
     "langchain_mistralai",
     "langchain_nvidia_ai_endpoints",
     "langchain_openai",
+    "langchain_ollama",
     "pillow",
     "boto3",
     "qianfan",
@@ -61,7 +62,7 @@ anthropic-chat = "jupyter_ai_magics.partner_providers.anthropic:ChatAnthropicPro
 cohere = "jupyter_ai_magics.partner_providers.cohere:CohereProvider"
 gpt4all = "jupyter_ai_magics:GPT4AllProvider"
 huggingface_hub = "jupyter_ai_magics:HfHubProvider"
-ollama = "jupyter_ai_magics:OllamaProvider"
+ollama = "jupyter_ai_magics.partner_providers.ollama:OllamaProvider"
 openai = "jupyter_ai_magics.partner_providers.openai:OpenAIProvider"
 openai-chat = "jupyter_ai_magics.partner_providers.openai:ChatOpenAIProvider"
 azure-chat-openai = "jupyter_ai_magics.partner_providers.openai:AzureChatOpenAIProvider"
@@ -83,7 +84,7 @@ cohere = "jupyter_ai_magics.partner_providers.cohere:CohereEmbeddingsProvider"
 mistralai = "jupyter_ai_magics.partner_providers.mistralai:MistralAIEmbeddingsProvider"
 gpt4all = "jupyter_ai_magics:GPT4AllEmbeddingsProvider"
 huggingface_hub = "jupyter_ai_magics:HfHubEmbeddingsProvider"
-ollama = "jupyter_ai_magics:OllamaEmbeddingsProvider"
+ollama = "jupyter_ai_magics.partner_providers.ollama:OllamaEmbeddingsProvider"
 openai = "jupyter_ai_magics.partner_providers.openai:OpenAIEmbeddingsProvider"
 qianfan = "jupyter_ai_magics:QianfanEmbeddingsEndpointProvider"

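The updated entry-point strings now dotted-path into the new `partner_providers.ollama` module. A minimal sketch of how such an entry point is resolved at runtime via `importlib.metadata`; the group name `"jupyter_ai.model_providers"` is an assumption for illustration, as the group headers are not shown in this diff:

```python
# Sketch only: how an entry point like
#   ollama = "jupyter_ai_magics.partner_providers.ollama:OllamaProvider"
# is discovered and loaded. The group name below is an assumption.
from importlib.metadata import entry_points

eps = entry_points(group="jupyter_ai.model_providers")  # assumed group name
for ep in eps:
    if ep.name == "ollama":
        OllamaProvider = ep.load()  # imports partner_providers.ollama, returns the class
        print(OllamaProvider.id, OllamaProvider.name)
```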
