Skip to content

Commit 51e0b7f

Browse files
D-Joey-Ghassieb
and authored
fix(openai): include responses instructions in captured prompt (#1565)
Co-authored-by: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com>
1 parent cf68968 commit 51e0b7f

File tree

3 files changed

+79
-2
lines changed

3 files changed

+79
-2
lines changed

langfuse/openai.py

Lines changed: 29 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -246,6 +246,34 @@ def wrapper(wrapped: Any, instance: Any, args: Any, kwargs: Any) -> Any:
246246
return _with_langfuse
247247

248248

249+
def _extract_responses_prompt(kwargs: Any) -> Any:
    """Build the captured prompt for a Responses API call.

    Combines the ``input`` and ``instructions`` keyword arguments so the
    instructions are not lost from the trace:

    - neither given -> ``None``
    - input only -> the input, unchanged
    - instructions only -> ``{"instructions": ...}``
    - string input + instructions -> system/user message pair
    - list input + instructions -> system message prepended to the list
    - any other input + instructions -> dict carrying both values
    """
    raw_input = kwargs.get("input", None)
    raw_instructions = kwargs.get("instructions", None)

    # The OpenAI SDK uses the NOT_GIVEN sentinel for omitted arguments;
    # normalize it to None so both absent forms are handled identically.
    if isinstance(raw_input, NotGiven):
        raw_input = None

    if isinstance(raw_instructions, NotGiven):
        raw_instructions = None

    if raw_instructions is None:
        return raw_input

    if raw_input is None:
        return {"instructions": raw_instructions}

    if isinstance(raw_input, str):
        return [
            {"role": "system", "content": raw_instructions},
            {"role": "user", "content": raw_input},
        ]

    if isinstance(raw_input, list):
        return [{"role": "system", "content": raw_instructions}, *raw_input]

    # Unknown input shape: keep both pieces without guessing a message format.
    return {"instructions": raw_instructions, "input": raw_input}
275+
276+
249277
def _extract_chat_prompt(kwargs: Any) -> Any:
250278
"""Extracts the user input from prompts. Returns an array of messages or dict with messages and functions"""
251279
prompt = {}
@@ -403,7 +431,7 @@ def _get_langfuse_data_from_kwargs(resource: OpenAiDefinition, kwargs: Any) -> A
403431
if resource.type == "completion":
404432
prompt = kwargs.get("prompt", None)
405433
elif resource.object == "Responses" or resource.object == "AsyncResponses":
406-
prompt = kwargs.get("input", None)
434+
prompt = _extract_responses_prompt(kwargs)
407435
elif resource.type == "chat":
408436
prompt = _extract_chat_prompt(kwargs)
409437
elif resource.type == "embedding":

tests/test_openai.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1407,7 +1407,10 @@ def test_response_api_streaming(openai):
14071407
assert len(generation.data) != 0
14081408
generationData = generation.data[0]
14091409
assert generationData.name == generation_name
1410-
assert generation.data[0].input == "Hello!"
1410+
assert generation.data[0].input == [
1411+
{"role": "system", "content": "You are a helpful assistant."},
1412+
{"role": "user", "content": "Hello!"},
1413+
]
14111414
assert generationData.type == "GENERATION"
14121415
assert "gpt-4o" in generationData.model
14131416
assert generationData.start_time is not None
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import pytest
2+
3+
try:
4+
# Compatibility across OpenAI SDK versions where NOT_GIVEN export moved.
5+
from openai import NOT_GIVEN
6+
except ImportError:
7+
from openai._types import NOT_GIVEN
8+
9+
from langfuse.openai import _extract_responses_prompt
10+
11+
12+
@pytest.mark.parametrize(
    "kwargs, expected",
    [
        # No instructions: the raw input string passes through unchanged.
        ({"input": "Hello!"}, "Hello!"),
        # String input + instructions: converted to a system/user message pair.
        (
            {"instructions": "You are helpful.", "input": "Hello!"},
            [
                {"role": "system", "content": "You are helpful."},
                {"role": "user", "content": "Hello!"},
            ],
        ),
        # List input + instructions: system message is prepended to the list.
        (
            {
                "instructions": "You are helpful.",
                "input": [{"role": "user", "content": "Hello!"}],
            },
            [
                {"role": "system", "content": "You are helpful."},
                {"role": "user", "content": "Hello!"},
            ],
        ),
        # Instructions without input: wrapped in a plain dict.
        (
            {"instructions": "You are helpful."},
            {"instructions": "You are helpful."},
        ),
        # NOT_GIVEN sentinel must be treated exactly like an absent value.
        (
            {"instructions": "You are helpful.", "input": NOT_GIVEN},
            {"instructions": "You are helpful."},
        ),
        ({"instructions": NOT_GIVEN, "input": "Hello!"}, "Hello!"),
        # Both omitted via sentinel: nothing to capture.
        ({"instructions": NOT_GIVEN, "input": NOT_GIVEN}, None),
    ],
)
def test_extract_responses_prompt(kwargs, expected):
    """Verify _extract_responses_prompt merges Responses API instructions into the captured prompt."""
    assert _extract_responses_prompt(kwargs) == expected

0 commit comments

Comments
 (0)