Skip to content

Commit b576e39

Browse files
committed
fix: tighten Inception provider copy and drop redundant comment
Describe Mercury 2 as a diffusion model and scope the provider blurbs to Mercury 2 specifically rather than a Mercury family.
1 parent 0858623 commit b576e39

3 files changed

Lines changed: 3 additions & 4 deletions

File tree

core/llm/toolSupport.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,6 @@ export const PROVIDER_TOOL_SUPPORT: Record<string, (model: string) => boolean> =
271271
return false;
272272
},
273273
inception: (model) => {
274-
// https://docs.inceptionlabs.ai/ - mercury-2 supports tool calling
275274
const lower = model.toLowerCase();
276275
return lower.startsWith("mercury-2");
277276
},

gui/src/pages/AddNewModel/configs/providers.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -595,9 +595,9 @@ Select the \`GPT-4o\` model below to complete your provider configuration, but n
595595
provider: "inception",
596596
icon: "inception.png",
597597
description:
598-
"Inception Labs provides Mercury, the fastest diffusion-based LLM family with 128k context and tool calling.",
598+
"Inception Labs provides Mercury 2, a fast diffusion model with 128k context and tool calling.",
599599
longDescription:
600-
"To get started with Inception Labs, obtain an API key from the [Inception Labs platform](https://platform.inceptionlabs.ai/). Their Mercury models are OpenAI-compatible and support chat, tool calling, and structured outputs.",
600+
"To get started with Inception Labs, obtain an API key from the [Inception Labs platform](https://platform.inceptionlabs.ai/). Mercury 2 is OpenAI-compatible and supports chat, tool calling, and structured outputs.",
601601
tags: [ModelProviderTags.RequiresApiKey],
602602
collectInputFor: [
603603
{

packages/llm-info/src/providers/inception.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ export const Inception: ModelProvider = {
77
displayName: "Mercury 2",
88
contextLength: 128000,
99
description:
10-
"Inception Labs' fastest reasoning LLM and their most powerful model, with tool calling and structured outputs support.",
10+
"Inception Labs' fastest reasoning diffusion model and their most powerful model, with tool calling and structured outputs support.",
1111
regex: /mercury-2/i,
1212
recommendedFor: ["chat"],
1313
},

0 commit comments

Comments (0)