-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy path__init__.py
More file actions
329 lines (257 loc) · 11.2 KB
/
__init__.py
File metadata and controls
329 lines (257 loc) · 11.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
"""Openlayer lib."""
__all__ = [
"configure",
"trace",
"trace_anthropic",
"trace_openai",
"trace_openai_assistant_thread_run",
"trace_mistral",
"trace_groq",
"trace_async_openai",
"trace_async",
"trace_bedrock",
"trace_azure_content_understanding",
"trace_oci_genai",
"trace_oci", # Alias for backward compatibility
"trace_litellm",
"trace_portkey",
"trace_google_adk",
"unpatch_google_adk",
"trace_gemini",
"update_current_trace",
"update_current_step",
# Offline buffer management functions
"replay_buffered_traces",
"get_buffer_status",
"clear_offline_buffer",
# User and session context functions
"set_user_session_context",
"update_trace_user_session",
"get_current_user_id",
"get_current_session_id",
"clear_user_session_context",
]
# ---------------------------------- Tracing --------------------------------- #
from .tracing import tracer
from .tracing.context import (
    set_user_session_context,
    update_trace_user_session,
    get_current_user_id,
    get_current_session_id,
    clear_user_session_context,
)

# Re-export the tracer module's callables at package level so callers can use
# e.g. ``openlayer.lib.trace`` instead of reaching into ``tracing.tracer``.
configure = tracer.configure
trace = tracer.trace
trace_async = tracer.trace_async
update_current_trace = tracer.update_current_trace
update_current_step = tracer.update_current_step

# Offline buffer management functions
replay_buffered_traces = tracer.replay_buffered_traces
get_buffer_status = tracer.get_buffer_status
clear_offline_buffer = tracer.clear_offline_buffer
def trace_anthropic(client):
    """Trace Anthropic chat completions.

    Wraps an ``anthropic.Anthropic`` client so its completions are traced.
    """
    # pylint: disable=import-outside-toplevel
    import anthropic

    from .integrations import anthropic_tracer

    if isinstance(client, anthropic.Anthropic):
        return anthropic_tracer.trace_anthropic(client)
    raise ValueError("Invalid client. Please provide an Anthropic client.")
def trace_openai(client):
    """Trace OpenAI chat completions.

    Wraps a sync ``openai.Client`` or ``openai.AzureOpenAI`` client.
    """
    # pylint: disable=import-outside-toplevel
    import openai

    from .integrations import openai_tracer

    if isinstance(client, (openai.Client, openai.AzureOpenAI)):
        return openai_tracer.trace_openai(client)
    raise ValueError("Invalid client. Please provide an OpenAI client.")
def trace_async_openai(client):
    """Trace async OpenAI chat completions.

    Args:
        client: An ``openai.AsyncOpenAI`` or ``openai.AsyncAzureOpenAI`` client.

    Raises:
        ValueError: If the client is not an async OpenAI client.
    """
    # pylint: disable=import-outside-toplevel
    import openai

    from .integrations import async_openai_tracer

    # Only the async client classes are accepted; the sync variants are handled
    # by trace_openai.
    if not isinstance(client, (openai.AsyncOpenAI, openai.AsyncAzureOpenAI)):
        raise ValueError("Invalid client. Please provide an async OpenAI client.")
    return async_openai_tracer.trace_async_openai(client)
def trace_openai_assistant_thread_run(client, run):
    """Trace a single OpenAI Assistants API thread run.

    Delegates to the OpenAI integration tracer.
    """
    # pylint: disable=import-outside-toplevel
    from .integrations import openai_tracer

    result = openai_tracer.trace_openai_assistant_thread_run(client, run)
    return result
def trace_mistral(client):
    """Trace Mistral chat completions.

    Wraps a ``mistralai.Mistral`` client so its completions are traced.
    """
    # pylint: disable=import-outside-toplevel
    import mistralai

    from .integrations import mistral_tracer

    if isinstance(client, mistralai.Mistral):
        return mistral_tracer.trace_mistral(client)
    raise ValueError("Invalid client. Please provide a Mistral client.")
def trace_groq(client):
    """Trace Groq queries.

    Wraps a ``groq.Groq`` client so its queries are traced.
    """
    # pylint: disable=import-outside-toplevel
    import groq

    from .integrations import groq_tracer

    if isinstance(client, groq.Groq):
        return groq_tracer.trace_groq(client)
    raise ValueError("Invalid client. Please provide a Groq client.")
def trace_bedrock(client):
    """Trace AWS Bedrock model invocations.

    Args:
        client: A boto3 client created for the ``bedrock-runtime`` service.

    Raises:
        ImportError: If ``boto3`` is not installed.
        ValueError: If the client is not a ``bedrock-runtime`` client.
    """
    # pylint: disable=import-outside-toplevel
    try:
        import boto3  # noqa: F401  # Imported only to verify boto3 is installed.
    except ImportError as exc:
        # Chain the original error so the traceback shows the real cause.
        raise ImportError("boto3 is required for Bedrock tracing. Install with: pip install boto3") from exc

    from .integrations import bedrock_tracer

    # boto3 clients are generated dynamically, so there is no public class to
    # isinstance-check; inspect the private service model instead.
    service_model = getattr(client, "_service_model", None)
    if service_model is None or service_model.service_name != "bedrock-runtime":
        raise ValueError("Invalid client. Please provide a boto3 bedrock-runtime client.")
    return bedrock_tracer.trace_bedrock(client)
def trace_azure_content_understanding(client):
    """Trace Azure Content Understanding analyses.

    Args:
        client: An ``azure.ai.contentunderstanding.ContentUnderstandingClient``.

    Raises:
        ImportError: If ``azure-ai-contentunderstanding`` is not installed.
        ValueError: If the client is not a ``ContentUnderstandingClient``.
    """
    # pylint: disable=import-outside-toplevel
    try:
        from azure.ai.contentunderstanding import ContentUnderstandingClient
    except ImportError as exc:
        # Chain the original error so the traceback shows the real cause.
        raise ImportError(
            "azure-ai-contentunderstanding is required for Azure Content Understanding tracing. "
            "Install with: pip install azure-ai-contentunderstanding"
        ) from exc

    from .integrations import azure_content_understanding_tracer

    if not isinstance(client, ContentUnderstandingClient):
        raise ValueError(
            "Invalid client. Please provide a ContentUnderstandingClient."
        )
    return azure_content_understanding_tracer.trace_azure_content_understanding(client)
def trace_oci_genai(client, estimate_tokens: bool = True):
    """Trace OCI GenAI chat completions.

    Args:
        client: OCI GenAI client.
        estimate_tokens: Whether to estimate tokens when not available. Defaults to True.

    Raises:
        ImportError: If the ``oci`` package is not installed.
        ValueError: If ``client`` is not a ``GenerativeAiInferenceClient``.
    """
    # pylint: disable=import-outside-toplevel
    try:
        import oci
    except ImportError as exc:
        # Chain the original error so the traceback shows the real cause.
        raise ImportError("oci is required for OCI GenAI tracing. Install with: pip install oci") from exc

    from .integrations import oci_tracer

    if not isinstance(client, oci.generative_ai_inference.GenerativeAiInferenceClient):
        raise ValueError("Invalid client. Please provide an OCI GenAI client.")
    return oci_tracer.trace_oci_genai(client, estimate_tokens=estimate_tokens)
# --------------------------------- OCI GenAI -------------------------------- #
# Alias for backward compatibility: earlier releases exported the OCI GenAI
# tracer under the shorter name ``trace_oci``.
trace_oci = trace_oci_genai
# --------------------------------- LiteLLM ---------------------------------- #
def trace_litellm():
    """Enable tracing for LiteLLM completions.

    This function patches litellm.completion to automatically trace all completions
    made through the LiteLLM library, which provides a unified interface to 100+ LLM APIs.

    Raises:
        ImportError: If ``litellm`` is not installed.

    Example:
        >>> import litellm
        >>> from openlayer.lib import trace_litellm
        >>> # Enable tracing
        >>> trace_litellm()
        >>> # Use LiteLLM normally - tracing happens automatically
        >>> response = litellm.completion(
        ...     model="gpt-3.5-turbo",
        ...     messages=[{"role": "user", "content": "Hello!"}],
        ...     inference_id="custom-id-123",  # Optional Openlayer parameter
        ... )
    """
    # pylint: disable=import-outside-toplevel
    try:
        import litellm  # noqa: F401  # Imported only to verify litellm is installed.
    except ImportError as exc:
        # Chain the original error so the traceback shows the real cause.
        raise ImportError("litellm is required for LiteLLM tracing. Install with: pip install litellm") from exc

    from .integrations import litellm_tracer

    return litellm_tracer.trace_litellm()
# ---------------------------------- Portkey ---------------------------------- #
def trace_portkey():
    """Enable tracing for Portkey completions.

    This function patches Portkey's chat.completions.create to automatically trace
    all OpenAI-compatible completions routed via the Portkey AI Gateway.

    Raises:
        ImportError: If ``portkey-ai`` is not installed.

    Example:
        >>> from portkey_ai import Portkey
        >>> from openlayer.lib import trace_portkey
        >>> # Enable openlayer tracing for all Portkey completions
        >>> trace_portkey()
        >>> # Basic portkey client initialization
        >>> portkey = Portkey(
        >>>     api_key = os.environ['PORTKEY_API_KEY'],
        >>>     config = "YOUR_PORTKEY_CONFIG_ID", # optional your portkey config id
        >>> )
        >>> # use portkey normally - tracing happens automatically
        >>> response = portkey.chat.completions.create(
        >>>     #model = "@YOUR_PORTKEY_SLUG/YOUR_MODEL_NAME", # optional if giving config
        >>>     messages = [
        >>>         {"role": "system", "content": "You are a helpful assistant."},
        >>>         {"role": "user", "content": "Write a poem on Argentina, least 100 words."}
        >>>     ]
        >>> )
    """
    # pylint: disable=import-outside-toplevel
    try:
        from portkey_ai import Portkey  # noqa: F401
    except ImportError as exc:
        # Chain the original error so the traceback shows the real cause.
        raise ImportError("portkey-ai is required for Portkey tracing. Install with: pip install portkey-ai") from exc

    from .integrations import portkey_tracer

    return portkey_tracer.trace_portkey()
# ------------------------------ Google ADK ---------------------------------- #
def trace_google_adk(disable_adk_otel: bool = False):
    """Enable tracing for Google Agent Development Kit (ADK).

    Patches Google ADK so that agent execution, LLM calls, and tool calls made
    through the ADK framework are traced automatically. By default, ADK's
    built-in OpenTelemetry tracing stays active, so telemetry can flow to both
    Google Cloud and Openlayer at the same time.

    Args:
        disable_adk_otel: If True, disables ADK's built-in OpenTelemetry tracing.
            When False (default), ADK's OTel tracing works alongside Openlayer,
            allowing data to be sent to both Google Cloud (Cloud Trace, etc.)
            and Openlayer. Set to True only if you want Openlayer exclusively.

    Requirements:
        Google ADK and wrapt must be installed:
        pip install google-adk wrapt

    Example:
        >>> import os
        >>> os.environ["OPENLAYER_API_KEY"] = "your-api-key"
        >>> os.environ["OPENLAYER_INFERENCE_PIPELINE_ID"] = "your-pipeline-id"
        >>> from openlayer.lib import trace_google_adk
        >>>
        >>> # Enable tracing with both Google Cloud OTel and Openlayer (default)
        >>> trace_google_adk()
        >>>
        >>> # OR: Enable tracing with Openlayer only (disable ADK's OTel)
        >>> # trace_google_adk(disable_adk_otel=True)
        >>>
        >>> # Now create and run your ADK agents
        >>> from google.adk.agents import Agent
        >>> agent = Agent(name="Assistant", model="gemini-2.5-flash")
        >>> result = await agent.run_async(...)
    """
    # pylint: disable=import-outside-toplevel
    from .integrations import google_adk_tracer

    result = google_adk_tracer.trace_google_adk(disable_adk_otel=disable_adk_otel)
    return result
def unpatch_google_adk():
    """Remove Google ADK tracing patches.

    Restores Google ADK's original behavior by removing all Openlayer
    instrumentation previously applied by ``trace_google_adk``.

    Example:
        >>> from openlayer.lib import unpatch_google_adk
        >>> unpatch_google_adk()
    """
    # pylint: disable=import-outside-toplevel
    from .integrations import google_adk_tracer

    result = google_adk_tracer.unpatch_google_adk()
    return result
# -------------------------------- Google Gemini --------------------------------- #
def trace_gemini(client):
    """Trace Google Gemini chat completions.

    Args:
        client: A ``google.generativeai.GenerativeModel`` instance.

    Raises:
        ImportError: If ``google-generativeai`` is not installed.
        ValueError: If ``client`` is not a ``GenerativeModel`` instance.
    """
    # pylint: disable=import-outside-toplevel
    try:
        import google.generativeai as genai
    except ImportError as exc:
        # Chain the original error so the traceback shows the real cause.
        raise ImportError(
            "google-generativeai is required for Gemini tracing. Install with: pip install google-generativeai"
        ) from exc

    from .integrations import gemini_tracer

    if not isinstance(client, genai.GenerativeModel):
        raise ValueError("Invalid client. Please provide a google.generativeai.GenerativeModel instance.")
    return gemini_tracer.trace_gemini(client)