Skip to content

Commit 017f854

Browse files
add more tests and merge
2 parents 079ff1b + ad16c7f commit 017f854

File tree

2 files changed

+872
-161
lines changed

2 files changed

+872
-161
lines changed

tests/conftest.py

Lines changed: 41 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1080,6 +1080,28 @@ def inner(response_content, serialize_pydantic=False, request_headers=None):
10801080
return inner
10811081

10821082

1083+
@pytest.fixture
def get_rate_limit_model_response():
    """Factory fixture: build an HTTPX 429 (rate-limited) response.

    The returned callable accepts optional request headers and produces an
    ``HttpxResponse`` with status 429 whose ``request`` is a POST to
    ``/responses`` carrying those headers.
    """

    def inner(request_headers=None):
        # Default to no extra headers when the caller passes nothing.
        headers = {} if request_headers is None else request_headers

        rate_limited_request = HttpxRequest("POST", "/responses", headers=headers)
        return HttpxResponse(429, request=rate_limited_request)

    return inner
1103+
1104+
10831105
@pytest.fixture
10841106
def streaming_chat_completions_model_response():
10851107
return [
@@ -1218,6 +1240,25 @@ def nonstreaming_chat_completions_model_response():
12181240
)
12191241

12201242

1243+
@pytest.fixture
def openai_embedding_model_response():
    """Canned OpenAI embeddings API response: one 3-dim embedding vector."""
    # Single embedding entry mirroring the shape of a real API payload.
    single_embedding = openai.types.Embedding(
        embedding=[0.1, 0.2, 0.3],
        index=0,
        object="embedding",
    )
    token_usage = openai.types.create_embedding_response.Usage(
        prompt_tokens=5,
        total_tokens=5,
    )
    return openai.types.CreateEmbeddingResponse(
        data=[single_embedding],
        model="text-embedding-ada-002",
        object="list",
        usage=token_usage,
    )
1260+
1261+
12211262
@pytest.fixture
12221263
def nonstreaming_responses_model_response():
12231264
return openai.types.responses.Response(

0 commit comments

Comments
 (0)