@@ -132,14 +132,17 @@ def test_flow_decorator_populates_output_message(
     try:
         humanloop_client = get_humanloop_client()

+        # GIVEN a flow that returns a ChatMessage like dict
         @humanloop_client.flow(path=f"{sdk_test_dir}/test_flow_log_output_message")
         def my_flow(question: str) -> dict[str, Any]:
             return {"role": "user", "content": question}

+        # WHEN the flow is called
         assert "france" in my_flow("What is the capital of the France?")["content"].lower()

         time.sleep(5)

+        # THEN the Flow is created and the Log has output_message populated
         flow_response = humanloop_client.files.retrieve_by_path(path=f"{sdk_test_dir}/test_flow_log_output_message")
         assert flow_response is not None
         flow_logs_response = humanloop_client.logs.list(file_id=flow_response.id, page=1, size=50)
@@ -161,12 +164,13 @@ async def test_async_flow_decorator(
 ):
     humanloop_client = get_humanloop_client()

+    # GIVEN an async flow that returns a string
     @humanloop_client.a_flow(path=f"{sdk_test_dir}/test_async_flow")
     async def my_flow(question: str) -> str:
         return "baz!"

-    # THEN the output is the one expected
-    assert "baz!" in await my_flow("test")
+    # WHEN the flow is called
+    assert "baz!" == await my_flow("test")

     # Wait for the flow and log to propagate to integration backend
     await asyncio.sleep(3)
@@ -191,12 +195,15 @@ async def test_async_tool_decorator(
 ):
     humanloop_client = get_humanloop_client()

+    # GIVEN an async tool that returns a string
     @humanloop_client.a_tool(path=f"{sdk_test_dir}/test_async_tool")
     async def my_tool(question: str) -> str:
         return "baz!"

+    # THEN the tool has a json_schema
     assert hasattr(my_tool, "json_schema")

+    # WHEN the tool is called
     await my_tool("test")

     # Wait for the flow and log to propagate to integration backend
@@ -223,6 +230,7 @@ async def test_async_prompt_decorator(
 ):
     humanloop_client = get_humanloop_client()

+    # GIVEN an async prompt that calls OpenAI
     @humanloop_client.a_prompt(path=f"{sdk_test_dir}/test_async_prompt")
     async def my_prompt(question: str) -> str:
         openai_client = AsyncOpenAI(api_key=openai_key)
@@ -235,7 +243,8 @@ async def my_prompt(question: str) -> str:
         assert response.choices[0].message.content is not None
         return response.choices[0].message.content

-    # THEN the output is the one expected
+    # WHEN the prompt is called
+    # THEN the output is not null
     output = await my_prompt("What is the capital of the France?")
     assert output is not None

@@ -249,6 +258,7 @@ async def my_prompt(question: str) -> str:
     # THEN a Log exists on the File
     prompt_logs_response = humanloop_client.logs.list(file_id=prompt_file_response.id, page=1, size=50)
     assert prompt_logs_response.items is not None and len(prompt_logs_response.items) == 1
+    # THEN output_message matches the one intercepted from OpenAI response
     assert prompt_logs_response.items[0].output_message.content == output  # type: ignore[union-attr]


@@ -259,6 +269,7 @@ async def test_async_flow_decorator_with_trace(
 ):
     humanloop_client = get_humanloop_client()

+    # GIVEN async flow and prompt decorators
     @humanloop_client.a_prompt(path=f"{sdk_test_dir}/test_async_prompt_with_trace")
     async def my_prompt(question: str) -> str:
         openai_client = AsyncOpenAI(api_key=openai_key)
@@ -273,20 +284,26 @@ async def my_prompt(question: str) -> str:

     @humanloop_client.a_flow(path=f"{sdk_test_dir}/test_async_flow_with_trace")
     async def my_flow(question: str) -> str:
-        return await my_prompt("test")
+        return await my_prompt(question="test")

+    # WHEN the flow is called
     await my_flow("test")

     # Wait for the flow and log to propagate to integration backend
     await asyncio.sleep(3)

+    # THEN both files exist on Humanloop
     flow_file_response = humanloop_client.files.retrieve_by_path(path=f"{sdk_test_dir}/test_async_flow_with_trace")
     assert flow_file_response is not None
+    prompt_file_response = humanloop_client.files.retrieve_by_path(path=f"{sdk_test_dir}/test_async_prompt_with_trace")
+    assert prompt_file_response is not None

+    # THEN a Log exists on the File
     flow_logs_response = humanloop_client.logs.list(file_id=flow_file_response.id, page=1, size=50)
     assert flow_logs_response.items is not None and len(flow_logs_response.items) == 1
     assert flow_logs_response.items[0].output is not None and flow_logs_response.items[0].inputs == {"question": "test"}
     flow_log_with_trace_response = humanloop_client.logs.get(id=flow_logs_response.items[0].id)
+    # THEN a Prompt Log is added to the Flow Log trace
     assert (
         flow_log_with_trace_response["trace_children"] is not None  # type: ignore[index]
         and len(flow_log_with_trace_response["trace_children"]) == 1  # type: ignore[index]