Skip to content

Commit 950b011

Browse files
Merge pull request #122 from patrickfleith/release/0.0.27
Release/0.0.27
2 parents bfd763a + 45af39d commit 950b011

23 files changed: +385 additions, −56 deletions

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -103,8 +103,8 @@ config = ClassificationDatasetConfig(
103103
```python
104104
# Create LLM providers
105105
providers = [
106-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"),
107-
AnthropicProvider(model_id="claude-3-5-haiku-latest"),
106+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
107+
AnthropicProvider(model_id="claude-haiku-4-5-20251001"),
108108
GeminiProvider(model_id="gemini-2.0-flash")
109109
]
110110
```

datafast/examples/classification_trail_conditions_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,8 +58,8 @@
5858

5959
# Set up providers
6060
providers = [
61-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"),
62-
AnthropicProvider(model_id="claude-3-5-haiku-latest")
61+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
62+
AnthropicProvider(model_id="claude-haiku-4-5-20251001")
6363
]
6464

6565
# Generate dataset

datafast/examples/generic_pipeline_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def main():
4242
model_id="gpt-5-mini-2025-08-07",
4343
temperature=1
4444
),
45-
# AnthropicProvider(model_id="claude-3-5-haiku-latest"),
45+
# AnthropicProvider(model_id="claude-haiku-4-5-20251001"),
4646
# GeminiProvider(model_id="gemini-2.5-flash-lite", rpm_limit=15),
4747
# OllamaProvider(model_id="gemma3:4b"),
4848
]

datafast/examples/generic_pipeline_row_model_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
question="Qu'est ce qui t'a plu?",
3333

3434
# System fields
35-
model_id="gpt-4",
35+
model_id="gpt-5-mini-2025-08-07",
3636
language="fr"
3737
)
3838

datafast/examples/inspect_dataset_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,9 +46,9 @@
4646
from datafast.llms import OpenAIProvider, AnthropicProvider, GeminiProvider
4747

4848
providers = [
49-
OpenAIProvider(model_id="gpt-4.1-nano"),
49+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
5050
# Uncomment to use additional providers
51-
# AnthropicProvider(model_id="claude-3-5-haiku-latest"),
51+
# AnthropicProvider(model_id="claude-haiku-4-5-20251001"),
5252
# GeminiProvider(model_id="gemini-2.0-flash"),
5353
]
5454

datafast/examples/mcq_contextual_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -50,9 +50,9 @@ def main():
5050
output_file="mcq_ar6_contextual_dataset.jsonl",
5151
)
5252

53-
# 3. Initialize OpenAI provider with gpt-4.1-mini
53+
# 3. Initialize OpenAI provider with gpt-5-mini-2025-08-07
5454
providers = [
55-
OpenAIProvider(model_id="gpt-4.1-mini"),
55+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
5656
]
5757

5858
# 4. Generate the dataset

datafast/examples/mcq_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ def main():
2525

2626
# 2. Initialize LLM providers
2727
providers = [
28-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"),
29-
# AnthropicProvider(model_id="claude-3-5-haiku-latest"),
28+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"),
29+
# AnthropicProvider(model_id="claude-haiku-4-5-20251001"),
3030
# GeminiProvider(model_id="gemini-2.0-flash"),
3131
]
3232

datafast/examples/preference_dataset_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,10 +44,10 @@ def main():
4444
)
4545

4646
# 2. Initialize LLM providers
47-
question_gen_llm = OpenAIProvider(model_id="gpt-4.1-mini")
47+
question_gen_llm = OpenAIProvider(model_id="gpt-5-mini-2025-08-07")
4848
chosen_response_gen_llm = AnthropicProvider(model_id="claude-3-7-sonnet-latest")
4949
rejected_response_gen_llm = GeminiProvider(model_id="gemini-2.0-flash")
50-
judge_llm = OpenAIProvider(model_id="gpt-4.1")
50+
judge_llm = OpenAIProvider(model_id="gpt-5-mini-2025-08-07")
5151

5252
# 3. Generate the dataset
5353
dataset = PreferenceDataset(config)

datafast/examples/quickstart_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535

3636
providers = [
3737
OpenAIProvider(model_id="gpt-5-nano-2025-08-07"),
38-
# AnthropicProvider(model_id="claude-3-5-haiku-latest"),
38+
# AnthropicProvider(model_id="claude-haiku-4-5-20251001"),
3939
# GeminiProvider(model_id="gemini-2.0-flash"),
4040
# OllamaProvider(model_id="gemma3:12b")
4141
]

datafast/examples/raw_text_space_engineering_example.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,8 @@ def main():
4242

4343
# 2. Create LLM providers with specific models
4444
providers = [
45-
OpenAIProvider(model_id="gpt-4.1-mini-2025-04-14"), # You may want to use stronger models
46-
AnthropicProvider(model_id="claude-3-5-haiku-latest"),
45+
OpenAIProvider(model_id="gpt-5-mini-2025-08-07"), # You may want to use stronger models
46+
AnthropicProvider(model_id="claude-haiku-4-5-20251001"),
4747
]
4848

4949
# 3. Generate the dataset

0 commit comments

Comments (0)