Skip to content

Commit 9a47e6b

Browse files
committed
update OpenAI model ID from gpt-4.1-mini to gpt-5-mini-2025-08-07 across all files
1 parent e67260c commit 9a47e6b

16 files changed

Lines changed: 28 additions & 28 deletions

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ config = ClassificationDatasetConfig(
103103
```python
104104
# Create LLM providers
105105
providers = [
106-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"),
106+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
107107
AnthropicProvider(model_id="claude-3-5-haiku-latest"),
108108
GeminiProvider(model_id="gemini-2.0-flash")
109109
]

datafast/examples/classification_trail_conditions_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@
5858

5959
# Set up providers
6060
providers = [
61-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"),
61+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
6262
AnthropicProvider(model_id="claude-3-5-haiku-latest")
6363
]
6464

datafast/examples/generic_pipeline_row_model_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
question="Qu'est ce qui t'a plu?",
3333

3434
# System fields
35-
model_id="gpt-4",
35+
model_id="gpt-5-mini-2025-08-07",
3636
language="fr"
3737
)
3838

datafast/examples/inspect_dataset_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@
4646
from datafast.llms import OpenAIProvider, AnthropicProvider, GeminiProvider
4747

4848
providers = [
49-
OpenAIProvider(model_id="gpt-4.1-nano"),
49+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
5050
# Uncomment to use additional providers
5151
# AnthropicProvider(model_id="claude-3-5-haiku-latest"),
5252
# GeminiProvider(model_id="gemini-2.0-flash"),

datafast/examples/mcq_contextual_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -50,9 +50,9 @@ def main():
5050
output_file="mcq_ar6_contextual_dataset.jsonl",
5151
)
5252

53-
# 3. Initialize OpenAI provider with gpt-4.1-mini
53+
# 3. Initialize OpenAI provider with gpt-5-mini-2025-08-07
5454
providers = [
55-
OpenAIProvider(model_id="gpt-4.1-mini"),
55+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
5656
]
5757

5858
# 4. Generate the dataset

datafast/examples/mcq_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ def main():
2525

2626
# 2. Initialize LLM providers
2727
providers = [
28-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"),
28+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
2929
# AnthropicProvider(model_id="claude-3-5-haiku-latest"),
3030
# GeminiProvider(model_id="gemini-2.0-flash"),
3131
]

datafast/examples/preference_dataset_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,10 +44,10 @@ def main():
4444
)
4545

4646
# 2. Initialize LLM providers
47-
question_gen_llm = OpenAIProvider(model_id="gpt-4.1-mini")
47+
question_gen_llm = OpenAIProvider(model_id="gpt-5-mini-2025-08-07")
4848
chosen_response_gen_llm = AnthropicProvider(model_id="claude-3-7-sonnet-latest")
4949
rejected_response_gen_llm = GeminiProvider(model_id="gemini-2.0-flash")
50-
judge_llm = OpenAIProvider(model_id="gpt-4.1")
50+
judge_llm = OpenAIProvider(model_id="gpt-5-mini-2025-08-07")
5151

5252
# 3. Generate the dataset
5353
dataset = PreferenceDataset(config)

datafast/examples/raw_text_space_engineering_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def main():
4242

4343
# 2. Create LLM providers with specific models
4444
providers = [
45-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"), # You may want to use stronger models
45+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"), # You may want to use stronger models
4646
AnthropicProvider(model_id="claude-3-5-haiku-latest"),
4747
]
4848

datafast/llms.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -242,7 +242,7 @@ def env_key_name(self) -> str:
242242

243243
def __init__(
244244
self,
245-
model_id: str = "gpt-4.1-mini-2025-04-14",
245+
model_id: str = "gpt-5-mini-2025-08-07",
246246
api_key: str | None = None,
247247
temperature: float | None = None,
248248
max_completion_tokens: int | None = None,
@@ -252,7 +252,7 @@ def __init__(
252252
"""Initialize the OpenAI provider.
253253
254254
Args:
255-
model_id: The model ID (defaults to gpt-4.1-mini-2025-04-14)
255+
model_id: The model ID (defaults to gpt-5-mini-2025-08-07)
256256
api_key: API key (if None, will get from environment)
257257
temperature: The sampling temperature to be used, between 0 and 2. Higher values like 0.8 produce more random outputs, while lower values like 0.2 make outputs more focused and deterministic
258258
max_completion_tokens: An upper bound for the number of tokens that can be generated for a completion, including visible output tokens and reasoning tokens.
@@ -419,7 +419,7 @@ def env_key_name(self) -> str:
419419

420420
def __init__(
421421
self,
422-
model_id: str = "openai/gpt-4.1-mini", # for default model
422+
model_id: str = "openai/gpt-5-mini", # for default model
423423
api_key: str | None = None,
424424
temperature: float | None = None,
425425
max_completion_tokens: int | None = None,
@@ -429,7 +429,7 @@ def __init__(
429429
"""Initialize the OpenRouter provider.
430430
431431
Args:
432-
model_id: The model ID (defaults to openai/gpt-4.1-mini)
432+
model_id: The model ID (defaults to openai/gpt-5-mini)
433433
api_key: API key (if None, will get from environment)
434434
temperature: Temperature for generation (0.0 to 1.0)
435435
max_completion_tokens: Maximum tokens to generate

docs/guides/generating_mcq_datasets.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ Configure one or more LLM providers to generate your dataset:
183183

184184
```python
185185
providers = [
186-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"),
186+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
187187
AnthropicProvider(model_id="claude-3-5-haiku-latest"),
188188
GeminiProvider(model_id="gemini-2.0-flash")
189189
]
@@ -272,7 +272,7 @@ def main():
272272

273273
# 2. Initialize LLM providers
274274
providers = [
275-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"),
275+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
276276
# Add more providers as needed
277277
# AnthropicProvider(model_id="claude-3-5-haiku-latest"),
278278
# GeminiProvider(model_id="gemini-2.0-flash"),

0 commit comments

Comments (0)