Skip to content

Commit 57ff107

Browse files
committed
openai-v2: add test for chat completions create as streaming context manager
1 parent d0d895d commit 57ff107

2 files changed

Lines changed: 195 additions & 0 deletions

File tree

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,143 @@
interactions:
- request:
    body: |-
      {
        "messages": [
          {
            "role": "user",
            "content": "Say this is a test"
          }
        ],
        "model": "gpt-4o-mini",
        "stream": true,
        "stream_options": {
          "include_usage": true
        }
      }
    headers:
      Accept:
      - application/json
      Accept-Encoding:
      - gzip, deflate
      Connection:
      - keep-alive
      Content-Length:
      - '148'
      Content-Type:
      - application/json
      Host:
      - api.openai.com
      User-Agent:
      - OpenAI/Python 1.109.1
      X-Stainless-Arch:
      - x64
      X-Stainless-Async:
      - 'false'
      X-Stainless-Lang:
      - python
      X-Stainless-OS:
      - Linux
      X-Stainless-Package-Version:
      - 1.109.1
      X-Stainless-Raw-Response:
      - 'true'
      X-Stainless-Runtime:
      - CPython
      X-Stainless-Runtime-Version:
      - 3.12.12
      authorization:
      - Bearer test_openai_api_key
      x-stainless-read-timeout:
      - '600'
      x-stainless-retry-count:
      - '0'
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: |+
        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"O0oN0CP7u"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":"This"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"HOMotuQ"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" is"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"LYznWA8Y"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"nYSWcAuhj"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" test"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"zM3Mvs"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"pj41T0w9RH"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" How"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"nr6uc6M"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"zgV1iqQ"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"jMAItCBRP"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" assist"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"n00J"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"YipqI9K"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":" further"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"PBM"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"CCufikJzDT"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null,"obfuscation":"i6voU"}

        data: {"id":"chatcmpl-Cn5osIz68gdrhlraoRxxItrACbME5","object":"chat.completion.chunk","created":1765816478,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_11f3029f6b","choices":[],"usage":{"prompt_tokens":12,"completion_tokens":12,"total_tokens":24,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}},"obfuscation":"8xsH1Fy9T3"}

        data: [DONE]

    headers:
      CF-RAY:
      - 9ae75976adddb5aa-MXP
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream; charset=utf-8
      Date:
      - Mon, 15 Dec 2025 16:34:38 GMT
      Server:
      - cloudflare
      Set-Cookie: test_set_cookie
      Strict-Transport-Security:
      - max-age=31536000; includeSubDomains; preload
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      cf-cache-status:
      - DYNAMIC
      openai-organization: test_openai_org_id
      openai-processing-ms:
      - '239'
      openai-project:
      - proj_Pf1eM5R55Z35wBy4rt8PxAGq
      openai-version:
      - '2020-10-01'
      x-envoy-upstream-service-time:
      - '543'
      x-openai-proxy-wasm:
      - v0.1
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '10000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '9999993'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_864c78af073c42faa55805a4c81e4a0d
    status:
      code: 200
      message: OK
version: 1

instrumentation-genai/opentelemetry-instrumentation-openai-v2/tests/test_chat_completions.py

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -903,6 +903,58 @@ def test_chat_completion_with_content_span_unsampled(
903903
assert logs[0].log_record.trace_flags == logs[1].log_record.trace_flags
904904

905905

906+
@pytest.mark.vcr()
def test_chat_completion_with_context_manager_streaming(
    span_exporter, log_exporter, openai_client, instrument_with_content
):
    """A streamed chat completion consumed via ``with`` still produces the
    expected span attributes and the user/choice log events."""
    model = "gpt-4o-mini"
    messages = [{"role": "user", "content": "Say this is a test"}]

    content = ""
    with openai_client.chat.completions.create(
        messages=messages,
        model=model,
        stream=True,
        stream_options={"include_usage": True},
    ) as stream:
        for part in stream:
            if part.choices:
                content += part.choices[0].delta.content or ""
            # only the final chunk of the stream carries usage data
            if getattr(part, "usage", None):
                stream_usage = part.usage
                stream_model = part.model
                stream_id = part.id

    spans = span_exporter.get_finished_spans()
    assert_all_attributes(
        spans[0],
        model,
        stream_id,
        stream_model,
        stream_usage.prompt_tokens,
        stream_usage.completion_tokens,
        response_service_tier="default",
    )

    logs = log_exporter.get_finished_logs()
    assert len(logs) == 2

    assert_message_in_logs(
        logs[0],
        "gen_ai.user.message",
        {"content": messages[0]["content"]},
        spans[0],
    )

    assert_message_in_logs(
        logs[1],
        "gen_ai.choice",
        {
            "index": 0,
            "finish_reason": "stop",
            "message": {
                "role": "assistant",
                "content": content,
            },
        },
        spans[0],
    )
956+
957+
906958
def chat_completion_multiple_tools_streaming(
907959
span_exporter, log_exporter, openai_client, expect_content
908960
):

0 commit comments

Comments
 (0)