Skip to content

Commit b5f0359

Browse files
authored
Merge pull request #115 from UiPath/feature/llm_agent_hub
agenthub: use llms from agenthub
2 parents c24c5b0 + aa88b7a commit b5f0359

File tree

6 files changed

+31
-258
lines changed

6 files changed

+31
-258
lines changed

pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
[project]
22
name = "uipath-langchain"
3-
version = "0.0.116"
3+
version = "0.0.117"
44
description = "UiPath Langchain"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.10"
77
dependencies = [
8-
"uipath>=2.0.77, <2.1.0",
8+
"uipath>=2.0.79, <2.1.0",
99
"langgraph>=0.2.70",
1010
"langchain-core>=0.3.34",
1111
"langgraph-checkpoint-sqlite>=2.0.3",

src/uipath_langchain/_utils/_request_mixin.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,8 @@ class Config:
4040

4141
default_headers: Optional[Mapping[str, str]] = {
4242
"X-UiPath-Streaming-Enabled": "false",
43+
"X-UiPath-JobKey": os.getenv("UIPATH_JOB_KEY", ""),
44+
"X-UiPath-ProcessKey": os.getenv("UIPATH_PROCESS_KEY", ""),
4345
}
4446
model_name: Optional[str] = Field(
4547
default_factory=lambda: os.getenv("UIPATH_MODEL_NAME", "gpt-4o-2024-08-06"),

src/uipath_langchain/_utils/_settings.py

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
# mypy: disable-error-code="syntax"
22
import os
3-
from enum import Enum
43
from typing import Any, Optional
54

65
import httpx
@@ -47,14 +46,6 @@ class UiPathClientSettings(BaseSettings):
4746
action_id: str = Field(default="DefaultActionId", alias="UIPATH_ACTION_ID")
4847

4948

50-
class UiPathEndpoints(Enum):
51-
NORMALIZED_COMPLETION_ENDPOINT = "llmgateway_/api/chat/completions"
52-
PASSTHROUGH_COMPLETION_ENDPOINT = "llmgateway_/openai/deployments/{model}/chat/completions?api-version={api_version}"
53-
EMBEDDING_ENDPOINT = (
54-
"llmgateway_/openai/deployments/{model}/embeddings?api-version={api_version}"
55-
)
56-
57-
5849
def get_uipath_token_header(
5950
settings: Any = None,
6051
) -> str:

src/uipath_langchain/chat/models.py

Lines changed: 22 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,6 @@
11
import json
2+
import logging
3+
from os import environ as env
24
from typing import Any, Dict, List, Literal, Optional, Union
35

46
from langchain_core.callbacks import (
@@ -12,9 +14,11 @@
1214
from langchain_core.runnables import Runnable
1315
from langchain_openai.chat_models import AzureChatOpenAI
1416
from pydantic import BaseModel
17+
from uipath.utils import EndpointManager
1518

1619
from uipath_langchain._utils._request_mixin import UiPathRequestMixin
17-
from uipath_langchain._utils._settings import UiPathEndpoints
20+
21+
logger = logging.getLogger(__name__)
1822

1923

2024
class UiPathAzureChatOpenAI(UiPathRequestMixin, AzureChatOpenAI):
@@ -71,14 +75,25 @@ def with_structured_output(
7175

7276
@property
7377
def endpoint(self) -> str:
74-
return UiPathEndpoints.PASSTHROUGH_COMPLETION_ENDPOINT.value.format(
78+
endpoint = EndpointManager.get_passthrough_endpoint()
79+
logger.debug("Using endpoint: %s", endpoint)
80+
return endpoint.format(
7581
model=self.model_name, api_version=self.openai_api_version
7682
)
7783

7884

7985
class UiPathChat(UiPathRequestMixin, AzureChatOpenAI):
8086
"""Custom LLM connector for LangChain integration with UiPath Normalized."""
8187

88+
def __init__(self, *args: Any, **kwargs: Any):
89+
"""Initialize the UiPath Azure Chat OpenAI model."""
90+
91+
super().__init__(*args, **kwargs)
92+
self.default_headers = {
93+
"X-UiPath-JobKey": env.get("UIPATH_JOB_KEY", ""),
94+
"X-UiPath-ProcessKey": env.get("UIPATH_PROCESS_KEY", ""),
95+
}
96+
8297
def _create_chat_result(
8398
self,
8499
response: Union[Dict[str, Any], BaseModel],
@@ -252,7 +267,11 @@ def with_structured_output(
252267

253268
@property
254269
def endpoint(self) -> str:
255-
return UiPathEndpoints.NORMALIZED_COMPLETION_ENDPOINT.value
270+
endpoint = EndpointManager.get_passthrough_endpoint()
271+
logger.debug("Using endpoint: %s", endpoint)
272+
return endpoint.format(
273+
model=self.model_name, api_version=self.openai_api_version
274+
)
256275

257276
@property
258277
def is_normalized(self) -> bool:

src/uipath_langchain/embeddings/embeddings.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,9 @@
55
from langchain_community.callbacks.manager import openai_callback_var
66
from langchain_openai.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
77
from pydantic import Field
8+
from uipath.utils import EndpointManager
89

910
from uipath_langchain._utils._request_mixin import UiPathRequestMixin
10-
from uipath_langchain._utils._settings import UiPathEndpoints
1111

1212

1313
class UiPathAzureOpenAIEmbeddings(UiPathRequestMixin, AzureOpenAIEmbeddings):
@@ -43,7 +43,8 @@ def __init__(self, **kwargs):
4343

4444
@property
4545
def endpoint(self) -> str:
46-
return UiPathEndpoints.EMBEDDING_ENDPOINT.value.format(
46+
endpoint = EndpointManager.get_embeddings_endpoint()
47+
return endpoint.format(
4748
model=self.model_name, api_version=self.openai_api_version
4849
)
4950

@@ -105,6 +106,7 @@ async def aembed_documents(
105106

106107
@property
107108
def endpoint(self) -> str:
108-
return UiPathEndpoints.EMBEDDING_ENDPOINT.value.format(
109+
endpoint = EndpointManager.get_embeddings_endpoint()
110+
return endpoint.format(
109111
model=self.model_name, api_version=self.openai_api_version
110112
)

tests/test_langchain_client.py

Lines changed: 0 additions & 241 deletions
This file was deleted.

0 commit comments

Comments (0)