|
4 | 4 | from sentry_sdk import consts |
5 | 5 | from sentry_sdk.ai.monitoring import record_token_usage |
6 | 6 | from sentry_sdk.consts import OP, SPANDATA |
7 | | -from sentry_sdk.ai.utils import set_data_normalized, normalize_message_roles |
| 7 | +from sentry_sdk.ai.utils import set_data_normalized |
8 | 8 |
|
9 | 9 | from typing import TYPE_CHECKING |
10 | 10 |
|
|
39 | 39 | from cohere import StreamedChatResponse_StreamEnd as StreamEndStreamedChatResponse |
40 | 40 |
|
41 | 41 |
|
# Maps Cohere's uppercase chat-history role names to the lowercase role
# names expected in gen_ai span data (e.g. "CHATBOT" -> "assistant").
# Used when building SPANDATA.GEN_AI_REQUEST_MESSAGES from kwargs["chat_history"];
# unknown roles fall through unchanged at the lookup site (dict .get(role, role)).
COHERE_ROLE_MAPPING = {
    "SYSTEM": "system",
    "USER": "user",
    "CHATBOT": "assistant",
    "TOOL": "tool",
}
| 48 | + |
| 49 | + |
42 | 50 | COLLECTED_CHAT_PARAMS = { |
43 | 51 | "model": SPANDATA.GEN_AI_REQUEST_MODEL, |
44 | 52 | "k": SPANDATA.GEN_AI_REQUEST_TOP_K, |
@@ -157,14 +165,14 @@ def new_chat(*args: "Any", **kwargs: "Any") -> "Any": |
157 | 165 | if should_send_default_pii() and integration.include_prompts: |
158 | 166 | messages = [] |
159 | 167 | for x in kwargs.get("chat_history", []): |
| 168 | + role = getattr(x, "role", "") |
160 | 169 | messages.append( |
161 | 170 | { |
162 | | - "role": getattr(x, "role", ""), |
| 171 | + "role": COHERE_ROLE_MAPPING.get(role, role), |
163 | 172 | "content": getattr(x, "message", ""), |
164 | 173 | } |
165 | 174 | ) |
166 | 175 | messages.append({"role": "user", "content": message}) |
167 | | - messages = normalize_message_roles(messages) |
168 | 176 | set_data_normalized( |
169 | 177 | span, |
170 | 178 | SPANDATA.GEN_AI_REQUEST_MESSAGES, |
|
0 commit comments