Skip to content

Commit a8a7273

Browse files
wip: replace hard-coded output-type literals with the shared `_openai_response_format_to_output_type` helper; simplify cache-token attribute constants to plain strings.
1 parent aa03039 commit a8a7273

2 files changed

Lines changed: 8 additions & 15 deletions

File tree

instrumentation-genai/opentelemetry-instrumentation-openai-v2/src/opentelemetry/instrumentation/openai_v2/response_extractors.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,10 @@
1515
openai_attributes as OpenAIAttributes,
1616
)
1717

18-
from .utils import get_server_address_and_port
18+
from .utils import (
19+
_openai_response_format_to_output_type,
20+
get_server_address_and_port,
21+
)
1922

2023
if TYPE_CHECKING:
2124
from openai.types.responses.response import Response
@@ -122,12 +125,8 @@ def _extract_output_type_from_value(text_config: object) -> str | None:
122125
return None
123126

124127
format_type = _get_field(format_config, "type")
125-
if format_type == "json_schema":
126-
return GenAIAttributes.GenAiOutputTypeValues.JSON.value
127-
if format_type == "text":
128-
return GenAIAttributes.GenAiOutputTypeValues.TEXT.value
129128
if isinstance(format_type, str):
130-
return format_type
129+
return _openai_response_format_to_output_type(format_type)
131130
return None
132131

133132

instrumentation-genai/opentelemetry-instrumentation-openai-v2/tests/test_utils.py

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -19,16 +19,10 @@
1919

2020
DEFAULT_MODEL = "gpt-4o-mini"
2121
DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small"
22-
GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS = getattr(
23-
GenAIAttributes,
24-
"GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS",
25-
"gen_ai.usage.cache_creation.input_tokens",
26-
)
27-
GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS = getattr(
28-
GenAIAttributes,
29-
"GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS",
30-
"gen_ai.usage.cache_read.input_tokens",
22+
GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS = (
23+
"gen_ai.usage.cache_creation.input_tokens"
3124
)
25+
GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS = "gen_ai.usage.cache_read.input_tokens"
3226
USER_ONLY_PROMPT = [{"role": "user", "content": "Say this is a test"}]
3327
USER_ONLY_EXPECTED_INPUT_MESSAGES = [
3428
{

0 commit comments

Comments (0)