Commit 18e94c9 ("wip")
1 parent 8425c8d

File tree: 7 files changed, +374 / -100 lines changed
Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
{
  "dependencies": [
    "."
  ],
  "graphs": {
    "agent": "./main.py:graph"
  },
  "env": ".env"
}
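For orientation (not part of this commit): this file appears to follow the standard LangGraph config shape, where each entry under "graphs" names a compiled graph via a "path:attribute" spec, here "./main.py:graph". A rough, illustrative sketch of how such a spec can be resolved in plain Python; the helper name load_graph and the module name "agent_module" are made up for illustration, not taken from the library:

# Illustrative only: resolve a "path:attribute" graph spec by loading the
# module from its file path and reading the named attribute from it.
import importlib.util

def load_graph(spec: str):
    path, attr = spec.split(":")
    module_spec = importlib.util.spec_from_file_location("agent_module", path)
    module = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(module)
    return getattr(module, attr)

graph = load_graph("./main.py:graph")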
Lines changed: 59 additions & 0 deletions
@@ -0,0 +1,59 @@
import random
from typing import Literal

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
from langgraph.types import interrupt
from typing_extensions import TypedDict


# State
class State(TypedDict):
    graph_state: str


# Conditional edge
def decide_mood(state) -> Literal["node_2", "node_3"]:
    # Often, we will use state to decide on the next node to visit
    user_input = state["graph_state"]

    # Here, let's just do a 50 / 50 split between nodes 2, 3
    if random.random() < 0.5:
        # 50% of the time, we return Node 2
        return "node_2"

    # 50% of the time, we return Node 3
    return "node_3"


# Nodes
def node_1(state):
    print("---Node 1---")

    return {"graph_state": state["graph_state"] + " I am"}


def node_2(state):
    print("---Node 2---")
    return {"graph_state": state["graph_state"] + " happy!"}


def node_3(state):
    print("---Node 3---")
    return {"graph_state": state["graph_state"] + " sad!"}


builder = StateGraph(State)
builder.add_node("node_1", node_1)
builder.add_node("node_2", node_2)
builder.add_node("node_3", node_3)

builder.add_edge(START, "node_1")
builder.add_edge("node_1", "node_2")
builder.add_edge("node_2", END)
builder.add_edge("node_3", END)

memory = MemorySaver()

graph = builder.compile(checkpointer=memory)
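For reference (not part of this commit), a minimal sketch of invoking the compiled graph locally. It assumes the module is importable as "main", per the "./main.py:graph" mapping in the config above. Note that in this wip state neither decide_mood nor the interrupt import is wired into the graph, so the path is deterministic (START -> node_1 -> node_2 -> END):

# Minimal local sanity check (not from the commit). Because the graph is
# compiled with a MemorySaver checkpointer, invoke() needs a thread_id in the
# config; "1" below is an arbitrary placeholder.
from main import graph

config = {"configurable": {"thread_id": "1"}}
result = graph.invoke({"graph_state": "Hello."}, config=config)
print(result["graph_state"])  # -> "Hello. I am happy!"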
Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
[project]
name = "c-host-in-uipath"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
authors = [{ name = "Eduard Stanculet", email = "[email protected]" }]
requires-python = ">=3.13"
dependencies = [
    "langchain-anthropic>=0.3.10",
    "langchain-community>=0.3.21",
    "langgraph>=0.3.29",
    "tavily-python>=0.5.4",
    "uipath>=2.0.8",
    "uipath-langchain>=0.0.88",
]
Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
{
  "entryPoints": [
    {
      "filePath": "agent",
      "uniqueId": "dcc7a309-fbcc-4999-af4f-2a75a844b49a",
      "type": "agent",
      "input": {
        "type": "string",
        "title": "graph_state"
      },
      "output": {}
    }
  ],
  "bindings": {
    "version": "2.0",
    "resources": []
  }
}

tests/cli_run/test_run_sample.py

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
import os
import sys

from dotenv import load_dotenv

from uipath_langchain._cli.cli_run import langgraph_run_middleware

load_dotenv()


def test_dummy():
    test_folder_path = os.path.dirname(os.path.abspath(__file__))
    sample_path = os.path.join(test_folder_path, "samples", "1-simple-graph")

    sys.path.append(sample_path)
    os.chdir(sample_path)
    result = langgraph_run_middleware(
        entrypoint=None,
        input='{ "graph_state": "GET Assets API does not enforce proper permissions Assets.View" }',
        resume=False,
    )

    assert result.error_message is None

tests/conftest.py

Lines changed: 95 additions & 95 deletions
@@ -1,95 +1,95 @@
Removed (old tests/conftest.py):

import logging
import os
from os import environ as env
from typing import Generator, Optional

import httpx
import pytest
from langchain.embeddings import CacheBackedEmbeddings
from langchain.globals import set_llm_cache
from langchain.storage import LocalFileStore
from langchain_community.cache import SQLiteCache

from uipath_langchain.embeddings import UiPathOpenAIEmbeddings
from uipath_langchain.utils._settings import UiPathCachedPathsSettings

test_cache_settings = UiPathCachedPathsSettings(
    CACHED_COMPLETION_DB="tests/llm_cache/tests_uipath_cache.sqlite",
    CACHED_EMBEDDINGS_DIR="tests/llm_cache/cached_embeddings",
)


def get_from_uipath_url():
    try:
        url = os.getenv("UIPATH_URL")
        if url:
            return "/".join(url.split("/", 3)[:3])
    except Exception:
        return None
    return None


def get_token():
    url_get_token = f"{get_from_uipath_url().rstrip('/')}/identity_/connect/token"

    os.environ["UIPATH_REQUESTING_PRODUCT"] = "uipath-python-sdk"
    os.environ["UIPATH_REQUESTING_FEATURE"] = "langgraph-agent"
    os.environ["UIPATH_TESTS_CACHE_LLMGW"] = "true"

    token_credentials = {
        "client_id": env.get("UIPATH_CLIENT_ID"),
        "client_secret": env.get("UIPATH_CLIENT_SECRET"),
        "grant_type": "client_credentials",
    }

    try:
        with httpx.Client() as client:
            response = client.post(url_get_token, data=token_credentials)
            response.raise_for_status()
            res_json = response.json()
            token = res_json.get("access_token")

            if not token:
                pytest.skip("Authentication token is empty or missing")
    except (httpx.HTTPError, ValueError, KeyError) as e:
        pytest.skip(f"Failed to obtain authentication token: {str(e)}")

    return token


@pytest.fixture(autouse=True)
def setup_test_env():
    env["UIPATH_ACCESS_TOKEN"] = get_token()


@pytest.fixture(scope="session")
def cached_llmgw_calls() -> Generator[Optional[SQLiteCache], None, None]:
    if not os.environ.get("UIPATH_TESTS_CACHE_LLMGW"):
        yield None
    else:
        logging.info("Setting up LLMGW cache")
        db_path = test_cache_settings.cached_completion_db
        os.makedirs(os.path.dirname(db_path), exist_ok=True)
        cache = SQLiteCache(database_path=db_path)
        set_llm_cache(cache)
        yield cache
        set_llm_cache(None)
    return


@pytest.fixture(scope="session")
def cached_embedder() -> Generator[Optional[CacheBackedEmbeddings], None, None]:
    if not os.environ.get("UIPATH_TESTS_CACHE_LLMGW"):
        yield None
    else:
        logging.info("Setting up embeddings cache")
        model = "text-embedding-3-large"
        embedder = CacheBackedEmbeddings.from_bytes_store(
            underlying_embeddings=UiPathOpenAIEmbeddings(model=model),
            document_embedding_cache=LocalFileStore(
                test_cache_settings.cached_embeddings_dir
            ),
            namespace=model,
        )
        yield embedder
    return

Added (new tests/conftest.py, commented out in full; note the default URL added on the UIPATH_URL lookup and the fallback return value):

# import logging
# import os
# from os import environ as env
# from typing import Generator, Optional

# import httpx
# import pytest
# from langchain.embeddings import CacheBackedEmbeddings
# from langchain.globals import set_llm_cache
# from langchain.storage import LocalFileStore
# from langchain_community.cache import SQLiteCache

# from uipath_langchain.embeddings import UiPathOpenAIEmbeddings
# from uipath_langchain.utils._settings import UiPathCachedPathsSettings

# test_cache_settings = UiPathCachedPathsSettings(
#     CACHED_COMPLETION_DB="tests/llm_cache/tests_uipath_cache.sqlite",
#     CACHED_EMBEDDINGS_DIR="tests/llm_cache/cached_embeddings",
# )


# def get_from_uipath_url():
#     try:
#         url = os.getenv("UIPATH_URL", "https://cloud.uipath.com/dummyOrg/dummyTennant/")
#         if url:
#             return "/".join(url.split("/", 3)[:3])
#     except Exception:
#         return "https://cloud.uipath.com/dummyOrg/dummyTennant/"
#     return None


# def get_token():
#     url_get_token = f"{get_from_uipath_url().rstrip('/')}/identity_/connect/token"

#     os.environ["UIPATH_REQUESTING_PRODUCT"] = "uipath-python-sdk"
#     os.environ["UIPATH_REQUESTING_FEATURE"] = "langgraph-agent"
#     os.environ["UIPATH_TESTS_CACHE_LLMGW"] = "true"

#     token_credentials = {
#         "client_id": env.get("UIPATH_CLIENT_ID"),
#         "client_secret": env.get("UIPATH_CLIENT_SECRET"),
#         "grant_type": "client_credentials",
#     }

#     try:
#         with httpx.Client() as client:
#             response = client.post(url_get_token, data=token_credentials)
#             response.raise_for_status()
#             res_json = response.json()
#             token = res_json.get("access_token")

#             if not token:
#                 pytest.skip("Authentication token is empty or missing")
#     except (httpx.HTTPError, ValueError, KeyError) as e:
#         pytest.skip(f"Failed to obtain authentication token: {str(e)}")

#     return token


# @pytest.fixture(autouse=True)
# def setup_test_env():
#     env["UIPATH_ACCESS_TOKEN"] = get_token()


# @pytest.fixture(scope="session")
# def cached_llmgw_calls() -> Generator[Optional[SQLiteCache], None, None]:
#     if not os.environ.get("UIPATH_TESTS_CACHE_LLMGW"):
#         yield None
#     else:
#         logging.info("Setting up LLMGW cache")
#         db_path = test_cache_settings.cached_completion_db
#         os.makedirs(os.path.dirname(db_path), exist_ok=True)
#         cache = SQLiteCache(database_path=db_path)
#         set_llm_cache(cache)
#         yield cache
#         set_llm_cache(None)
#     return


# @pytest.fixture(scope="session")
# def cached_embedder() -> Generator[Optional[CacheBackedEmbeddings], None, None]:
#     if not os.environ.get("UIPATH_TESTS_CACHE_LLMGW"):
#         yield None
#     else:
#         logging.info("Setting up embeddings cache")
#         model = "text-embedding-3-large"
#         embedder = CacheBackedEmbeddings.from_bytes_store(
#             underlying_embeddings=UiPathOpenAIEmbeddings(model=model),
#             document_embedding_cache=LocalFileStore(
#                 test_cache_settings.cached_embeddings_dir
#             ),
#             namespace=model,
#         )
#         yield embedder
#     return

0 commit comments