Skip to content

Commit cc61be0

Browse files
fix(types): constrain endpoint parameter to literals in batches
1 parent 65c1756 commit cc61be0

3 files changed

Lines changed: 31 additions & 9 deletions

File tree

.stats.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
configured_endpoints: 75
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai/togetherai-b584185aba41e3d597bf715d9b704f1c7d2663ae7d8f3f3c35e63d603738ee9c.yml
3-
openapi_spec_hash: 0b26ddf285392dd9f629c1161db62376
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai/togetherai-72e377e4d7f3fe8bb9f0dab1a70d7d9cf0f44914ff4d9b8ab238bc7f48008621.yml
3+
openapi_spec_hash: 798c6d992a5cb83901b5879502c22f9d
44
config_hash: 6c214c91fad5ead4849be777fd9e8108

src/together/resources/batches.py

Lines changed: 18 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22

33
from __future__ import annotations
44

5+
from typing_extensions import Literal
6+
57
import httpx
68

79
from ..types import batch_create_params
@@ -46,7 +48,7 @@ def with_streaming_response(self) -> BatchesResourceWithStreamingResponse:
4648
def create(
4749
self,
4850
*,
49-
endpoint: str,
51+
endpoint: Literal["/v1/chat/completions", "/v1/audio/transcriptions", "/v1/audio/translations"],
5052
input_file_id: str,
5153
completion_window: str | Omit = omit,
5254
model_id: str | Omit = omit,
@@ -62,7 +64,13 @@ def create(
6264
Create a new batch job with the given input file and endpoint
6365
6466
Args:
65-
endpoint: The endpoint to use for batch processing
67+
endpoint: The endpoint to use for batch processing. Each line of the uploaded input file
68+
is dispatched against this endpoint.
69+
70+
- `/v1/chat/completions` — chat completion batches
71+
- `/v1/audio/transcriptions` — audio transcription batches (e.g.
72+
`openai/whisper-large-v3`)
73+
- `/v1/audio/translations` — audio translation batches
6674
6775
input_file_id: ID of the uploaded input file containing batch requests
6876
@@ -211,7 +219,7 @@ def with_streaming_response(self) -> AsyncBatchesResourceWithStreamingResponse:
211219
async def create(
212220
self,
213221
*,
214-
endpoint: str,
222+
endpoint: Literal["/v1/chat/completions", "/v1/audio/transcriptions", "/v1/audio/translations"],
215223
input_file_id: str,
216224
completion_window: str | Omit = omit,
217225
model_id: str | Omit = omit,
@@ -227,7 +235,13 @@ async def create(
227235
Create a new batch job with the given input file and endpoint
228236
229237
Args:
230-
endpoint: The endpoint to use for batch processing
238+
endpoint: The endpoint to use for batch processing. Each line of the uploaded input file
239+
is dispatched against this endpoint.
240+
241+
- `/v1/chat/completions` — chat completion batches
242+
- `/v1/audio/transcriptions` — audio transcription batches (e.g.
243+
`openai/whisper-large-v3`)
244+
- `/v1/audio/translations` — audio translation batches
231245
232246
input_file_id: ID of the uploaded input file containing batch requests
233247

src/together/types/batch_create_params.py

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,22 @@
22

33
from __future__ import annotations
44

5-
from typing_extensions import Required, TypedDict
5+
from typing_extensions import Literal, Required, TypedDict
66

77
__all__ = ["BatchCreateParams"]
88

99

1010
class BatchCreateParams(TypedDict, total=False):
11-
endpoint: Required[str]
12-
"""The endpoint to use for batch processing"""
11+
endpoint: Required[Literal["/v1/chat/completions", "/v1/audio/transcriptions", "/v1/audio/translations"]]
12+
"""The endpoint to use for batch processing.
13+
14+
Each line of the uploaded input file is dispatched against this endpoint.
15+
16+
- `/v1/chat/completions` — chat completion batches
17+
- `/v1/audio/transcriptions` — audio transcription batches (e.g.
18+
`openai/whisper-large-v3`)
19+
- `/v1/audio/translations` — audio translation batches
20+
"""
1321

1422
input_file_id: Required[str]
1523
"""ID of the uploaded input file containing batch requests"""

0 commit comments

Comments (0)