Commit f1f607e

Refactor tracing integration to use updated Openlayer functions
Co-authored-by: vinicius <[email protected]>
1 parent 742a74f commit f1f607e

File tree: 1 file changed (+22, -22 lines)

examples/tracing/trace_metadata_updates.py

Lines changed: 22 additions & 22 deletions
@@ -15,7 +15,7 @@
 os.environ["OPENLAYER_API_KEY"] = "your-api-key-here"
 os.environ["OPENLAYER_INFERENCE_PIPELINE_ID"] = "your-pipeline-id-here"

-import openlayer
+from openlayer.lib import trace, trace_async, update_current_trace, update_current_span


 class UserSession:
@@ -34,15 +34,15 @@ class ChatApplication:
     def __init__(self):
         self.active_sessions: Dict[str, UserSession] = {}

-    @openlayer.trace()
+    @trace()
     def handle_user_request(self, request_text: str, session_token: str) -> str:
         """Main request handler that dynamically sets trace metadata."""

         # Get user session (this info isn't available as function arguments)
         user_session = self.get_user_session(session_token)

         # Set trace-level metadata with user context
-        openlayer.update_current_trace(
+        update_current_trace(
             name=f"chat_request_{user_session.user_id}",
             user_id=user_session.user_id,
             tags=["chat", "user_request", user_session.preferences.get("tier", "free")],
@@ -62,7 +62,7 @@ def handle_user_request(self, request_text: str, session_token: str) -> str:
         final_response = self.postprocess_response(response, user_session)

         # Update trace with final output
-        openlayer.update_current_trace(
+        update_current_trace(
             output={"response": final_response, "processing_time": "0.5s"},
             metadata={
                 "response_length": len(final_response),
@@ -73,12 +73,12 @@ def handle_user_request(self, request_text: str, session_token: str) -> str:
         user_session.interaction_count += 1
         return final_response

-    @openlayer.trace()
+    @trace()
     def preprocess_request(self, text: str, user_session: UserSession) -> str:
         """Preprocess user request with step-level metadata."""

         # Update current step with preprocessing context
-        openlayer.update_current_span(
+        update_current_span(
             metadata={
                 "preprocessing_type": "standard",
                 "user_preferences_applied": True,
@@ -97,14 +97,14 @@ def preprocess_request(self, text: str, user_session: UserSession) -> str:

         return processed

-    @openlayer.trace()
+    @trace()
     def generate_response(self, processed_text: str, user_session: UserSession) -> str:
         """Generate AI response with model metadata."""

         # Set model-specific metadata
         model_version = "gpt-4" if user_session.preferences.get("tier") == "premium" else "gpt-3.5-turbo"

-        openlayer.update_current_span(
+        update_current_span(
             metadata={
                 "model_used": model_version,
                 "temperature": 0.7,
@@ -127,11 +127,11 @@ def generate_response(self, processed_text: str, user_session: UserSession) -> str:

         return response

-    @openlayer.trace()
+    @trace()
     def postprocess_response(self, response: str, user_session: UserSession) -> str:
         """Postprocess response with personalization metadata."""

-        openlayer.update_current_span(
+        update_current_span(
             metadata={
                 "personalization_applied": True,
                 "content_filtering": user_session.preferences.get("content_filter", "moderate"),
@@ -170,12 +170,12 @@ def make_formal(self, text: str) -> str:
         return text.replace("can't", "cannot").replace("won't", "will not")


-@openlayer.trace()
+@trace()
 def batch_processing_example():
     """Example showing batch processing with trace metadata updates."""

     # Set trace metadata for batch job
-    openlayer.update_current_trace(
+    update_current_trace(
         name="batch_user_requests",
         tags=["batch", "processing", "multiple_users"],
         metadata={
@@ -199,15 +199,15 @@ def batch_processing_example():
         results.append(result)

         # Update batch progress
-        openlayer.update_current_trace(
+        update_current_trace(
             metadata={
                 "requests_processed": i + 1,
                 "progress_percentage": ((i + 1) / len(test_requests)) * 100
             }
         )

     # Update final batch metadata
-    openlayer.update_current_trace(
+    update_current_trace(
         output={"batch_results": results, "total_processed": len(results)},
         metadata={
             "processing_complete": True,
@@ -219,18 +219,18 @@ def batch_processing_example():
     return results


-@openlayer.trace()
+@trace()
 def error_handling_example():
     """Example showing error handling with trace metadata."""

-    openlayer.update_current_trace(
+    update_current_trace(
         name="error_handling_demo",
         metadata={"expected_behavior": "demonstrate error tracing"}
     )

     try:
         # Simulate some processing
-        openlayer.update_current_span(
+        update_current_span(
             metadata={"processing_step": "initial_validation"}
         )

@@ -239,7 +239,7 @@ def error_handling_example():

     except ValueError as e:
         # Update trace with error information
-        openlayer.update_current_trace(
+        update_current_trace(
             metadata={
                 "error_occurred": True,
                 "error_type": type(e).__name__,
@@ -252,11 +252,11 @@ def error_handling_example():
         return f"Error handled: {str(e)}"


-@openlayer.trace_async()
+@trace_async()
 async def async_example():
     """Example showing async trace metadata updates."""

-    openlayer.update_current_trace(
+    update_current_trace(
         name="async_processing",
         metadata={"execution_mode": "async"},
         tags=["async", "demo"]
@@ -265,12 +265,12 @@ async def async_example():
     # Simulate async processing steps
     import asyncio

-    openlayer.update_current_span(
+    update_current_span(
         metadata={"step": "async_sleep_simulation"}
     )
     await asyncio.sleep(0.1)

-    openlayer.update_current_trace(
+    update_current_trace(
         metadata={"async_complete": True},
         output="Async processing completed"
     )
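
For context, a minimal standalone sketch of the call pattern this commit adopts: the tracing helpers are imported from openlayer.lib and called directly, rather than through the top-level openlayer namespace. The sketch is not part of the commit; the function name, metadata values, and placeholder credentials are illustrative only, and it uses only the helpers and keyword arguments shown in the diff above.

import os

# Placeholder credentials, as in the example file
os.environ["OPENLAYER_API_KEY"] = "your-api-key-here"
os.environ["OPENLAYER_INFERENCE_PIPELINE_ID"] = "your-pipeline-id-here"

from openlayer.lib import trace, update_current_trace, update_current_span


@trace()
def answer(question: str) -> str:
    """Toy handler showing the refactored, namespace-free call style."""
    # Trace-level metadata set from inside the traced function
    update_current_trace(
        name="minimal_demo",
        tags=["demo"],
        metadata={"question_length": len(question)},
    )

    # Step-level metadata attached to the current span
    update_current_span(metadata={"step": "generate_answer"})

    result = f"You asked: {question}"

    # Attach the final output to the trace before returning
    update_current_trace(output={"response": result})
    return result


if __name__ == "__main__":
    print(answer("What changed in this commit?"))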
