diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/vertexai.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/vertexai.py new file mode 100644 index 000000000..48eec9763 --- /dev/null +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/vertexai.py @@ -0,0 +1,38 @@ +from jupyter_ai_magics.base_provider import BaseProvider +from langchain_google_vertexai import VertexAI, VertexAIEmbeddings + + +class VertexAIProvider(BaseProvider, VertexAI): + id = "vertexai" + name = "Vertex AI" + models = [ + "gemini-2.5-pro", + "gemini-2.5-flash", + ] + model_id_key = "model" + auth_strategy = None + pypi_package_deps = ["langchain-google-vertexai"] + help = ( + "To use Vertex AI Generative AI you must have the langchain-google-vertexai Python package installed and either:\n\n" + "- Have credentials configured for your environment (gcloud, workload identity, etc...)\n" + "- Store the path to a service account JSON file as the GOOGLE_APPLICATION_CREDENTIALS environment variable\n\n" + "This codebase uses the google.auth library which first looks for the application credentials variable mentioned above, and then looks for system-level auth. " + "For more information, see the [Vertex AI authentication documentation](https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm/)."
+ ) + +class VertexAIEmbeddingsProvider(BaseProvider, VertexAIEmbeddings): + id = "vertexai" + name = "Vertex AI" + models = [ + "text-embedding-004", + ] + model_id_key = "model" + auth_strategy = None + pypi_package_deps = ["langchain-google-vertexai"] + help = ( + "To use Vertex AI Generative AI you must have the langchain-google-vertexai Python package installed and either:\n\n" + "- Have credentials configured for your environment (gcloud, workload identity, etc...)\n" + "- Store the path to a service account JSON file as the GOOGLE_APPLICATION_CREDENTIALS environment variable\n\n" + "This codebase uses the google.auth library which first looks for the application credentials variable mentioned above, and then looks for system-level auth. " + "For more information, see the [Vertex AI authentication documentation](https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm/)." + ) diff --git a/packages/jupyter-ai-magics/pyproject.toml b/packages/jupyter-ai-magics/pyproject.toml index 694eabf6d..51cf1af18 100644 --- a/packages/jupyter-ai-magics/pyproject.toml +++ b/packages/jupyter-ai-magics/pyproject.toml @@ -50,8 +50,14 @@ all = [ "huggingface_hub", "ipywidgets", "langchain_anthropic", - "langchain_aws", - "langchain_cohere", + # `langchain_aws<0.2` is not compatible with LangChain v0.3 + "langchain_aws>=0.2,<0.3", + # `langchain_cohere<0.3` is not compatible with LangChain v0.3 + "langchain_cohere>=0.3", + # Pin cohere to <5.16 to prevent langchain_cohere from breaking as it uses ChatResponse removed in cohere 5.16.0 + # cohere>=5.5.6 is implied by langchain_cohere>=0.3 + # TODO: remove cohere pin when langchain_cohere is updated to work with cohere >=5.16 + "cohere>=5.5.6,<5.16", "langchain_google_genai", "langchain_mistralai", "langchain_nvidia_ai_endpoints", @@ -61,6 +67,7 @@ all = [ "boto3", "qianfan", "together", + "langchain-google-vertexai", ] [project.entry-points."jupyter_ai.model_providers"] @@ -84,6 +91,7 
@@ together-ai = "jupyter_ai_magics:TogetherAIProvider" gemini = "jupyter_ai_magics.partner_providers.gemini:GeminiProvider" mistralai = "jupyter_ai_magics.partner_providers.mistralai:MistralAIProvider" openrouter = "jupyter_ai_magics.partner_providers.openrouter:OpenRouterProvider" +vertexai = "jupyter_ai_magics.partner_providers.vertexai:VertexAIProvider" [project.entry-points."jupyter_ai.embeddings_model_providers"] azure = "jupyter_ai_magics.partner_providers.openai:AzureOpenAIEmbeddingsProvider" @@ -96,6 +104,7 @@ ollama = "jupyter_ai_magics.partner_providers.ollama:OllamaEmbeddingsProvider" openai = "jupyter_ai_magics.partner_providers.openai:OpenAIEmbeddingsProvider" openai-custom = "jupyter_ai_magics.partner_providers.openai:OpenAIEmbeddingsCustomProvider" qianfan = "jupyter_ai_magics:QianfanEmbeddingsEndpointProvider" +vertexai = "jupyter_ai_magics.partner_providers.vertexai:VertexAIEmbeddingsProvider" [tool.hatch.version] source = "nodejs"