Skip to content

Commit d02f1ca

Browse files
add conftest
1 parent 77af6f2 commit d02f1ca

File tree

1 file changed

+169
-0
lines changed

1 file changed

+169
-0
lines changed

tests/conftest.py

Lines changed: 169 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1126,6 +1126,175 @@ def nonstreaming_chat_completions_model_response():
11261126
)
11271127

11281128

1129+
@pytest.fixture
def streaming_chat_completions_model_responses():
    """Factory fixture simulating a two-turn streamed chat-completions exchange.

    The returned callable is a generator function. Iterating it yields two
    lists of ``ChatCompletionChunk`` objects:

    * turn 1 — assistant role delta, a streamed ``get_word_length`` tool call
      (name chunk then arguments chunk), a content chunk, a terminal chunk,
      and a usage-only chunk;
    * turn 2 — assistant role delta, the final answer content, a ``stop``
      terminal chunk, and a usage-only chunk.
    """
    chunk_mod = openai.types.chat.chat_completion_chunk

    def _chunk(turn_id, choices, usage=None):
        # All chunks in the simulated stream share the same static metadata;
        # only the id, choices, and (for the final chunk) usage vary.
        kwargs = dict(
            id=turn_id,
            object="chat.completion.chunk",
            created=10000000,
            model="gpt-3.5-turbo",
            choices=choices,
        )
        if usage is not None:
            kwargs["usage"] = usage
        return chunk_mod.ChatCompletionChunk(**kwargs)

    def _choice(delta, finish_reason=None):
        # Every choice in this fixture sits at index 0.
        return chunk_mod.Choice(index=0, delta=delta, finish_reason=finish_reason)

    def inner():
        yield [
            _chunk(
                "chatcmpl-turn-1",
                [_choice(chunk_mod.ChoiceDelta(role="assistant"))],
            ),
            _chunk(
                "chatcmpl-turn-1",
                [
                    _choice(
                        chunk_mod.ChoiceDelta(
                            tool_calls=[
                                chunk_mod.ChoiceDeltaToolCall(
                                    index=0,
                                    id="call_BbeyNhCKa6kYLYzrD40NGm3b",
                                    type="function",
                                    function=chunk_mod.ChoiceDeltaToolCallFunction(
                                        name="get_word_length",
                                        arguments="",
                                    ),
                                ),
                            ],
                        )
                    )
                ],
            ),
            _chunk(
                "chatcmpl-turn-1",
                [
                    _choice(
                        chunk_mod.ChoiceDelta(
                            tool_calls=[
                                chunk_mod.ChoiceDeltaToolCall(
                                    index=0,
                                    function=chunk_mod.ChoiceDeltaToolCallFunction(
                                        arguments='{"word": "eudca"}',
                                    ),
                                ),
                            ],
                        )
                    )
                ],
            ),
            _chunk(
                "chatcmpl-turn-1",
                [_choice(chunk_mod.ChoiceDelta(content="5"))],
            ),
            # NOTE(review): finish_reason is "function_call" even though the
            # stream carried tool_calls deltas; preserved verbatim from the
            # original fixture data — confirm against the consuming tests
            # before changing.
            _chunk(
                "chatcmpl-turn-1",
                [_choice(chunk_mod.ChoiceDelta(), finish_reason="function_call")],
            ),
            _chunk(
                "chatcmpl-turn-1",
                [],
                usage=chunk_mod.CompletionUsage(
                    prompt_tokens=142,
                    completion_tokens=50,
                    total_tokens=192,
                ),
            ),
        ]

        yield [
            _chunk(
                "chatcmpl-turn-2",
                [_choice(chunk_mod.ChoiceDelta(role="assistant"))],
            ),
            _chunk(
                "chatcmpl-turn-2",
                [
                    _choice(
                        chunk_mod.ChoiceDelta(
                            content="The word eudca has 5 letters."
                        )
                    )
                ],
            ),
            _chunk(
                "chatcmpl-turn-2",
                [_choice(chunk_mod.ChoiceDelta(), finish_reason="stop")],
            ),
            _chunk(
                "chatcmpl-turn-2",
                [],
                usage=chunk_mod.CompletionUsage(
                    prompt_tokens=89,
                    completion_tokens=28,
                    total_tokens=117,
                ),
            ),
        ]

    return inner
1296+
1297+
11291298
@pytest.fixture
11301299
def responses_tool_call_model_responses():
11311300
def inner(

0 commit comments

Comments
 (0)