Skip to content

Commit b888160

Browse files
committed
chore: add RAG sample
1 parent 4d0ee52 commit b888160

File tree

8 files changed

+6596
-0
lines changed

8 files changed

+6596
-0
lines changed

samples/RAG-sample/langgraph.json

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
{
2+
"dependencies": ["."],
3+
"graphs": {
4+
"researcher-and-uploader-agent": "./src/agents/researcher-and-uploader.py:graph",
5+
"quiz-generator-RAG-agent": "./src/agents/quiz-generator-RAG.py:graph"
6+
},
7+
"env": ".env"
8+
}

samples/RAG-sample/pyproject.toml

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
[project]
2+
name = "RAG-agents"
3+
version = "0.0.6"
4+
description = "Package containing 2 agents. The first one crawls the internet and adds relevant information to a storage bucket, the second one generates quizzes based on the gathered info and user input."
5+
authors = [
6+
{ name = "Radu Mocanu" }
7+
]
8+
requires-python = ">=3.10"
9+
dependencies = [
10+
"langgraph>=0.2.55",
11+
"langchain-community>=0.3.9",
12+
"langchain-anthropic>=0.3.8",
13+
"langchain-experimental>=0.3.4",
14+
"tavily-python>=0.5.0",
15+
"uipath==2.0.1",
16+
"uipath-langchain==0.0.87"
17+
]
18+
19+
[project.optional-dependencies]
20+
dev = ["mypy>=1.11.1", "ruff>=0.6.1"]
21+
22+
[build-system]
23+
requires = ["setuptools>=73.0.0", "wheel"]
24+
build-backend = "setuptools.build_meta"
25+
26+
[tool.setuptools.package-data]
27+
"*" = ["py.typed"]
28+
29+
[tool.ruff]
30+
lint.select = [
31+
"E", # pycodestyle
32+
"F", # pyflakes
33+
"I", # isort
34+
"D", # pydocstyle
35+
"D401", # First line should be in imperative mood
36+
"T201",
37+
"UP",
38+
]
39+
lint.ignore = [
40+
"UP006",
41+
"UP007",
42+
"UP035",
43+
"D417",
44+
"E501",
45+
]
46+
47+
[tool.ruff.lint.per-file-ignores]
48+
"tests/*" = ["D", "UP"]
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
---
2+
config:
3+
flowchart:
4+
curve: linear
5+
---
6+
graph TD;
7+
__start__([<p>__start__</p>]):::first
8+
invoke_researcher(invoke_researcher)
9+
create_quiz(create_quiz)
10+
__end__([<p>__end__</p>]):::last
11+
__start__ --> invoke_researcher;
12+
create_quiz --> __end__;
13+
invoke_researcher --> create_quiz;
14+
classDef default fill:#f2f0ff,line-height:1.2
15+
classDef first fill-opacity:0
16+
classDef last fill:#bfb6fc
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
---
2+
config:
3+
flowchart:
4+
curve: linear
5+
---
6+
graph TD;
7+
__start__([<p>__start__</p>]):::first
8+
upload_to_bucket(upload_to_bucket)
9+
prepare_input(prepare_input)
10+
__end__([<p>__end__</p>]):::last
11+
__start__ --> prepare_input;
12+
prepare_input --> researcher___start__;
13+
researcher___end__ --> upload_to_bucket;
14+
upload_to_bucket --> __end__;
15+
subgraph researcher
16+
researcher___start__(<p>__start__</p>)
17+
researcher_agent(agent)
18+
researcher_tools(tools)
19+
researcher___end__(<p>__end__</p>)
20+
researcher___start__ --> researcher_agent;
21+
researcher_tools --> researcher_agent;
22+
researcher_agent -.-> researcher_tools;
23+
researcher_agent -.-> researcher___end__;
24+
end
25+
classDef default fill:#f2f0ff,line-height:1.2
26+
classDef first fill-opacity:0
27+
classDef last fill:#bfb6fc
Lines changed: 128 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,128 @@
1+
from typing import Optional
2+
from langgraph.graph import END, START, MessagesState, StateGraph
3+
from langgraph.types import Command, interrupt
4+
from pydantic import BaseModel, Field
5+
from uipath import UiPath
6+
from langchain_openai import AzureChatOpenAI
7+
from langchain_core.output_parsers import PydanticOutputParser
8+
import logging
9+
import os
10+
import time
11+
from uipath._models import InvokeProcess, IngestionInProgressException
12+
from langchain_core.messages import HumanMessage, SystemMessage
13+
14+
logger = logging.getLogger(__name__)

# Azure OpenAI chat model used to turn the retrieved context into a quiz.
# Credentials/endpoint come from the environment (.env via langgraph.json).
_azure_llm_config = {
    "azure_deployment": "gpt-4o-mini",
    "api_key": os.getenv("AZURE_OPENAI_API_KEY"),
    "azure_endpoint": os.getenv("AZURE_OPENAI_ENDPOINT"),
    "api_version": "2024-10-21",
}
llm = AzureChatOpenAI(**_azure_llm_config)
22+
23+
class Quiz(BaseModel):
    """A single quiz item: a question, its difficulty, and the expected answer."""

    question: str = Field(description="One quiz question")
    # Difficulty is constrained to the closed interval [0.0, 1.0].
    difficulty: float = Field(description="How difficult is the question", ge=0.0, le=1.0)
    answer: str = Field(description="The expected answer to the question")
33+
34+
# Parser that coerces the LLM's JSON reply into a Quiz instance.
output_parser = PydanticOutputParser(pydantic_object=Quiz)

# System prompt template; {context} and {format_instructions} are filled in
# just before the model is invoked (see create_quiz).
system_message = """You are a quiz generator. Generate a quiz based on the user input. Your only context is the following one:

{context}

{format_instructions}

Respond with the classification in the requested JSON format."""

# UiPath SDK client (buckets + context grounding).
uipath = UiPath()
45+
46+
47+
class GraphOutput(BaseModel):
    """Final graph output: the generated quiz."""

    quiz: Quiz


class GraphInput(MessagesState):
    """Caller-supplied input for the quiz-generation graph."""

    general_category: str
    quiz_topic: str
    bucket_name: str
    index_name: str
    bucket_folder: Optional[str]
    # create_bucket: Optional[bool]


class GraphState(MessagesState):
    """Internal state carried between the graph's nodes."""

    quiz_topic: str
    bucket_name: str
    bucket_folder: Optional[str]
    index_name: str
63+
64+
# def decide_next_node(state: GraphState) -> Literal["get_context", "create_bucket"]:
65+
# if state.create_bucket:
66+
# return "create_bucket"
67+
# return "get_context"
68+
#
69+
# def create_bucket(state: GraphState) -> GraphState:
70+
# # to be implemented after sdk support for bucket creation is added
71+
72+
async def invoke_researcher(state: GraphInput) -> Command:
    """Invoke the researcher sub-agent, then queue the quiz request.

    Appends a "fetch data" message, suspends the graph via interrupt() so the
    platform runs the researcher-and-uploader-agent process, and on resume
    merges the researcher's last message plus the quiz request into the
    conversation.
    """
    # Fix: the original statement ended with a stray trailing comma, which
    # turned it into a one-element tuple expression (harmless but accidental).
    state["messages"].append(
        HumanMessage(f"Fetch data about {state['general_category']}")
    )
    input_args_json = {
        "messages": state["messages"],
        "bucket_name": state["bucket_name"],
        "bucket_folder": state.get("bucket_folder", None),
    }
    # interrupt() pauses execution until the invoked process completes; its
    # return value is the sub-agent's output state.
    agent_response = interrupt(InvokeProcess(
        name="researcher-and-uploader-agent",
        input_arguments=input_args_json,
    ))
    quiz_topic = state["quiz_topic"]
    return Command(
        update={
            "messages": [
                agent_response["messages"][-1],
                ("user", f"create a quiz about {quiz_topic}"),
            ],
        }
    )
88+
89+
async def create_quiz(state: GraphState) -> GraphOutput:
    """Generate a quiz grounded in the ingested bucket content.

    Ensures a context-grounding index exists over the storage bucket, triggers
    ingestion, polls until the index is searchable, then asks the LLM to
    produce a Quiz in the parser's JSON format.

    Raises:
        TimeoutError: if ingestion is still in progress after all retries
            (TimeoutError subclasses Exception, so existing `except Exception`
            callers keep working).
    """
    import asyncio  # local import: keeps module-level dependencies unchanged

    max_retries = 5
    index = uipath.context_grounding.get_or_create_index(
        state["index_name"], storage_bucket_name=state["bucket_name"]
    )
    uipath.context_grounding.ingest_data(index)

    context_data = None
    for attempt in range(max_retries):
        try:
            context_data = uipath.context_grounding.search(
                state["index_name"], state["quiz_topic"], 10
            )
            break
        except IngestionInProgressException as ex:
            logger.info(ex.message)
            logger.info(f"{max_retries - attempt - 1} retries left")
            # Fix: the original used time.sleep(5), which blocks the event
            # loop inside an async node; asyncio.sleep yields control instead.
            await asyncio.sleep(5)
    if not context_data:
        # NOTE(review): an *empty* search result also lands here, same as the
        # original truthiness check — confirm that is the intended behavior.
        raise TimeoutError("Ingestion is taking too long!")

    state["messages"].append(
        SystemMessage(
            system_message.format(
                format_instructions=output_parser.get_format_instructions(),
                context=context_data,
            )
        )
    )
    result = llm.invoke(state["messages"])
    return GraphOutput(quiz=output_parser.parse(result.content))
116+
117+
118+
# Assemble the two-step pipeline: researcher sub-agent first, quiz second.
builder = StateGraph(input=GraphInput, output=GraphOutput)
for node_name, node_fn in (
    ("invoke_researcher", invoke_researcher),
    ("create_quiz", create_quiz),
):
    builder.add_node(node_name, node_fn)

builder.add_edge(START, "invoke_researcher")
builder.add_edge("invoke_researcher", "create_quiz")
builder.add_edge("create_quiz", END)

# Compiled graph exported for the langgraph runtime (see langgraph.json).
graph = builder.compile()
Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
from typing import Optional
2+
import time
3+
from langchain_anthropic import ChatAnthropic
4+
from langchain_community.tools.tavily_search import TavilySearchResults
5+
from langgraph.graph import END, START, MessagesState, StateGraph
6+
from langgraph.prebuilt import create_react_agent
7+
from langgraph.types import Command
8+
from uipath import UiPath
9+
from langchain_core.messages import HumanMessage, SystemMessage
10+
11+
12+
# Web-search tool and the LLM backing the ReAct researcher.
tavily_tool = TavilySearchResults(max_results=5)
llm = ChatAnthropic(model="claude-3-5-sonnet-latest")

# UiPath SDK client used for bucket uploads.
uipath = UiPath()

# ReAct agent that searches the web for the user's topic.
research_agent = create_react_agent(
    llm,
    tools=[tavily_tool],
    prompt="You are a researcher. Search relevant information given the user topic.",
)
20+
21+
class GraphInput(MessagesState):
    """Caller-supplied input: where to store the research results."""

    bucket_name: str
    bucket_folder: Optional[str]


class GraphState(MessagesState):
    """Internal state: accumulated web results plus bucket coordinates."""

    web_results: str
    bucket_name: str
    bucket_folder: Optional[str]
29+
30+
def prepare_input(state: GraphInput) -> GraphState:
    """Seed the internal graph state from the input (web_results starts empty)."""
    folder = state.get("bucket_folder")
    return GraphState(
        messages=state["messages"],
        web_results="",
        bucket_name=state["bucket_name"],
        bucket_folder=folder,
    )
37+
38+
def research_node(state: GraphState) -> Command:
    """Run the ReAct researcher and stash its final answer in web_results."""
    agent_output = research_agent.invoke(state)
    latest_content = agent_output["messages"][-1].content
    return Command(update={"web_results": latest_content})
45+
46+
def upload_to_bucket(state: GraphState) -> MessagesState:
    """Upload the gathered web results to the storage bucket as a text file.

    The blob is named with the current UNIX timestamp; returns a single
    confirmation message as the node's output.
    """
    # TODO: also include a question summarization as document name
    current_timestamp = int(time.time())
    uipath.buckets.upload_from_memory(
        bucket_name=state["bucket_name"],
        blob_file_path=f"{current_timestamp}.txt",
        # Fix: "application/txt" is not a registered MIME type; the correct
        # type for plain text is "text/plain" (RFC 2046).
        content_type="text/plain",
        content=state["web_results"],
    )
    return MessagesState(messages=[HumanMessage("Relevant information uploaded to bucket.")])
55+
56+
57+
# Assemble the pipeline: normalize input, research, then upload to the bucket.
builder = StateGraph(input=GraphInput, output=MessagesState)
builder.add_node("prepare_input", prepare_input)
builder.add_node("researcher", research_node)
builder.add_node("upload_to_bucket", upload_to_bucket)

builder.add_edge(START, "prepare_input")
builder.add_edge("prepare_input", "researcher")
builder.add_edge("researcher", "upload_to_bucket")
builder.add_edge("upload_to_bucket", END)

# Compiled graph exported for the langgraph runtime (see langgraph.json).
graph = builder.compile()

0 commit comments

Comments
 (0)