diff --git a/server/ee/apps/runtime-job-app/build.gradle.kts b/server/ee/apps/runtime-job-app/build.gradle.kts index b9f644d3ac1..fe3dc6a66c3 100644 --- a/server/ee/apps/runtime-job-app/build.gradle.kts +++ b/server/ee/apps/runtime-job-app/build.gradle.kts @@ -63,6 +63,7 @@ dependencies { implementation(project(":server:libs:modules:components:ai:llm:nvidia")) implementation(project(":server:libs:modules:components:ai:llm:ollama")) implementation(project(":server:libs:modules:components:ai:llm:open-ai")) + implementation(project(":server:libs:modules:components:ai:llm:open-router")) implementation(project(":server:libs:modules:components:ai:llm:perplexity")) implementation(project(":server:libs:modules:components:ai:llm:stability")) implementation(project(":server:libs:modules:components:ai:llm:vertex:gemini")) diff --git a/server/libs/modules/components/ai/llm/open-router/build.gradle.kts b/server/libs/modules/components/ai/llm/open-router/build.gradle.kts new file mode 100644 index 00000000000..f25cf48db25 --- /dev/null +++ b/server/libs/modules/components/ai/llm/open-router/build.gradle.kts @@ -0,0 +1,5 @@ +version="1.0" + +dependencies { + implementation("org.springframework.ai:spring-ai-openai") +} diff --git a/server/libs/modules/components/ai/llm/hugging-face/src/main/java/com/bytechef/component/ai/llm/hugging/face/HuggingFaceComponentHandler.java b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/OpenRouterComponentHandler.java similarity index 58% rename from server/libs/modules/components/ai/llm/hugging-face/src/main/java/com/bytechef/component/ai/llm/hugging/face/HuggingFaceComponentHandler.java rename to server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/OpenRouterComponentHandler.java index b4136bfd893..76d9eda946b 100644 --- 
a/server/libs/modules/components/ai/llm/hugging-face/src/main/java/com/bytechef/component/ai/llm/hugging/face/HuggingFaceComponentHandler.java +++ b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/OpenRouterComponentHandler.java @@ -14,35 +14,33 @@ * limitations under the License. */ -package com.bytechef.component.ai.llm.hugging.face; +package com.bytechef.component.ai.llm.open.router; import static com.bytechef.component.definition.ComponentDsl.component; import com.bytechef.component.ComponentHandler; -import com.bytechef.component.ai.llm.hugging.face.action.HuggingFaceChatAction; -import com.bytechef.component.ai.llm.hugging.face.cluster.HuggingFaceChatModel; -import com.bytechef.component.ai.llm.hugging.face.connection.HuggingFaceConnection; +import com.bytechef.component.ai.llm.open.router.action.OpenRouterChatAction; +import com.bytechef.component.ai.llm.open.router.cluster.OpenRouterChatModel; +import com.bytechef.component.ai.llm.open.router.connection.OpenRouterConnection; import com.bytechef.component.definition.ComponentCategory; import com.bytechef.component.definition.ComponentDefinition; import com.google.auto.service.AutoService; /** - * @author Monika Domiter * @author Marko Kriskovic */ @AutoService(ComponentHandler.class) -public class HuggingFaceComponentHandler implements ComponentHandler { +public class OpenRouterComponentHandler implements ComponentHandler { - private static final ComponentDefinition COMPONENT_DEFINITION = component("huggingFace") - .title("Hugging Face") + private static final ComponentDefinition COMPONENT_DEFINITION = component("openRouter") + .title("Open Router") .description( - "Hugging Face is on a journey to advance and democratize artificial intelligence through open source " + - "and open science.") - .icon("path:assets/hugging-face.svg") + "OpenRouter provides a unified API that gives you access to hundreds of AI models through a single endpoint, while 
automatically handling fallbacks and selecting the most cost-effective options.") + .icon("path:assets/open-router.svg") .categories(ComponentCategory.ARTIFICIAL_INTELLIGENCE) - .connection(HuggingFaceConnection.CONNECTION_DEFINITION) - .actions(HuggingFaceChatAction.ACTION_DEFINITION) - .clusterElements(HuggingFaceChatModel.CLUSTER_ELEMENT_DEFINITION); + .connection(OpenRouterConnection.CONNECTION_DEFINITION) + .actions(OpenRouterChatAction.ACTION_DEFINITION) + .clusterElements(OpenRouterChatModel.CLUSTER_ELEMENT_DEFINITION); @Override public ComponentDefinition getDefinition() { diff --git a/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/action/OpenRouterChatAction.java b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/action/OpenRouterChatAction.java new file mode 100644 index 00000000000..fed123af283 --- /dev/null +++ b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/action/OpenRouterChatAction.java @@ -0,0 +1,161 @@ +/* + * Copyright 2025 ByteChef + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.bytechef.component.ai.llm.open.router.action; + +import static com.bytechef.component.ai.llm.ChatModel.ResponseFormat.TEXT; +import static com.bytechef.component.ai.llm.constant.LLMConstants.ASK; +import static com.bytechef.component.ai.llm.constant.LLMConstants.ATTACHMENTS_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.FORMAT_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.FREQUENCY_PENALTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.FREQUENCY_PENALTY_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.LOGIT_BIAS; +import static com.bytechef.component.ai.llm.constant.LLMConstants.LOGIT_BIAS_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.MAX_TOKENS; +import static com.bytechef.component.ai.llm.constant.LLMConstants.MAX_TOKENS_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.MESSAGES_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.MODEL; +import static com.bytechef.component.ai.llm.constant.LLMConstants.PRESENCE_PENALTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.PRESENCE_PENALTY_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.PROMPT_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.REASONING; +import static com.bytechef.component.ai.llm.constant.LLMConstants.REASONING_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.RESPONSE; +import static com.bytechef.component.ai.llm.constant.LLMConstants.RESPONSE_FORMAT; +import static com.bytechef.component.ai.llm.constant.LLMConstants.RESPONSE_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.SEED; +import static com.bytechef.component.ai.llm.constant.LLMConstants.SEED_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.STOP; +import static 
com.bytechef.component.ai.llm.constant.LLMConstants.STOP_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.SYSTEM_PROMPT_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TEMPERATURE; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TEMPERATURE_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TOP_K; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TOP_K_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TOP_P; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TOP_P_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.USER; +import static com.bytechef.component.ai.llm.constant.LLMConstants.USER_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.VERBOSITY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.VERBOSITY_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.CHAT_MODEL_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.LOGPROBS; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.LOGPROBS_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.MAX_COMPLETION_TOKENS; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.MAX_COMPLETION_TOKENS_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.SUPPORTED_PARAMETERS; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.SUPPORTED_PARAMETERS_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.TOP_LOGPROBS; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.TOP_LOGPROBS_PROPERTY; +import static com.bytechef.component.definition.Authorization.TOKEN; 
+import static com.bytechef.component.definition.ComponentDsl.action; + +import com.bytechef.component.ai.llm.ChatModel; +import com.bytechef.component.ai.llm.open.router.model.OpenRouterChatModel; +import com.bytechef.component.ai.llm.util.ModelUtils; +import com.bytechef.component.definition.ActionContext; +import com.bytechef.component.definition.ComponentDsl.ModifiableActionDefinition; +import com.bytechef.component.definition.Parameters; +import com.bytechef.component.definition.TypeReference; + +/** + * @author Marko Kriskovic + */ +public class OpenRouterChatAction { + + public static final ModifiableActionDefinition ACTION_DEFINITION = action(ASK) + .title("Ask") + .description("Ask anything you want.") + .properties( + SUPPORTED_PARAMETERS_PROPERTY, + CHAT_MODEL_PROPERTY, + PROMPT_PROPERTY, + FORMAT_PROPERTY, + SYSTEM_PROMPT_PROPERTY, + ATTACHMENTS_PROPERTY, + MESSAGES_PROPERTY, + RESPONSE_PROPERTY, + FREQUENCY_PENALTY_PROPERTY + .displayCondition("contains(%s, 'frequency_penalty')".formatted(SUPPORTED_PARAMETERS)), + LOGIT_BIAS_PROPERTY + .displayCondition("contains(%s, 'logit_bias')".formatted(SUPPORTED_PARAMETERS)), + LOGPROBS_PROPERTY + .displayCondition("contains(%s, 'logprobs')".formatted(SUPPORTED_PARAMETERS)), + MAX_COMPLETION_TOKENS_PROPERTY + .displayCondition("contains(%s, 'max_completion_tokens')".formatted(SUPPORTED_PARAMETERS)), + MAX_TOKENS_PROPERTY + .displayCondition("contains(%s, 'max_tokens')".formatted(SUPPORTED_PARAMETERS)), + PRESENCE_PENALTY_PROPERTY + .displayCondition("contains(%s, 'presence_penalty')".formatted(SUPPORTED_PARAMETERS)), + REASONING_PROPERTY + .displayCondition("contains(%s, 'reasoning')".formatted(SUPPORTED_PARAMETERS)), + SEED_PROPERTY + .displayCondition("contains(%s, 'seed')".formatted(SUPPORTED_PARAMETERS)), + STOP_PROPERTY + .displayCondition("contains(%s, 'stop')".formatted(SUPPORTED_PARAMETERS)), + TEMPERATURE_PROPERTY + .displayCondition("contains(%s, 'temperature')".formatted(SUPPORTED_PARAMETERS)), + 
TOP_LOGPROBS_PROPERTY + .displayCondition("contains(%s, 'top_logprobs')".formatted(SUPPORTED_PARAMETERS)), + TOP_K_PROPERTY + .displayCondition("contains(%s, 'top_k')".formatted(SUPPORTED_PARAMETERS)), + TOP_P_PROPERTY + .displayCondition("contains(%s, 'top_p')".formatted(SUPPORTED_PARAMETERS)), + VERBOSITY_PROPERTY + .displayCondition("contains(%s, 'verbosity')".formatted(SUPPORTED_PARAMETERS)), + USER_PROPERTY) + .output(ModelUtils::output) + .perform(OpenRouterChatAction::perform); + + public static final ChatModel CHAT_MODEL = (inputParameters, connectionParameters, responseFormatRequired) -> { + boolean jsonFormat = false; + + if (responseFormatRequired) { + ChatModel.ResponseFormat responseFormat = inputParameters.getRequiredFromPath( + RESPONSE + "." + RESPONSE_FORMAT, ChatModel.ResponseFormat.class); + + jsonFormat = !responseFormat.equals(TEXT); + } + + return OpenRouterChatModel.builder() + .apiKey(connectionParameters.getString(TOKEN)) + .model(inputParameters.getRequiredString(MODEL)) + .frequencyPenalty(inputParameters.getDouble(FREQUENCY_PENALTY)) + .logitBias(inputParameters.getMap(LOGIT_BIAS, new TypeReference<>() {})) + .logprobs(inputParameters.getBoolean(LOGPROBS)) + .maxCompletionTokens(inputParameters.getInteger(MAX_COMPLETION_TOKENS)) + .maxTokens(inputParameters.getInteger(MAX_TOKENS)) + .presencePenalty(inputParameters.getDouble(PRESENCE_PENALTY)) + .reasoning(inputParameters.getString(REASONING)) + .jsonResponseFormat(jsonFormat) + .seed(inputParameters.getInteger(SEED)) + .stop(inputParameters.getList(STOP, new TypeReference<>() {})) + .temperature(inputParameters.getDouble(TEMPERATURE)) + .topK(inputParameters.getDouble(TOP_K)) + .topLogprobs(inputParameters.getInteger(TOP_LOGPROBS)) + .topP(inputParameters.getDouble(TOP_P)) + .verbosity(inputParameters.getString(VERBOSITY)) + .user(inputParameters.getString(USER)) + .build(); + }; + + private OpenRouterChatAction() { + } + + public static Object perform(Parameters inputParameters, 
Parameters connectionParameters, ActionContext context) { + return CHAT_MODEL.getResponse(inputParameters, connectionParameters, context); + } +} diff --git a/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/cluster/OpenRouterChatModel.java b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/cluster/OpenRouterChatModel.java new file mode 100644 index 00000000000..cd1167d567d --- /dev/null +++ b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/cluster/OpenRouterChatModel.java @@ -0,0 +1,94 @@ +/* + * Copyright 2025 ByteChef + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.bytechef.component.ai.llm.open.router.cluster; + +import static com.bytechef.component.ai.llm.constant.LLMConstants.FREQUENCY_PENALTY_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.LOGIT_BIAS_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.MAX_TOKENS_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.PRESENCE_PENALTY_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.REASONING_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.SEED_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.STOP_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TEMPERATURE_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TOP_K_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.TOP_P_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.USER_PROPERTY; +import static com.bytechef.component.ai.llm.constant.LLMConstants.VERBOSITY_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.action.OpenRouterChatAction.CHAT_MODEL; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.CHAT_MODEL_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.LOGPROBS_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.MAX_COMPLETION_TOKENS_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.SUPPORTED_PARAMETERS; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.SUPPORTED_PARAMETERS_PROPERTY; +import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.TOP_LOGPROBS_PROPERTY; + +import com.bytechef.component.definition.ClusterElementDefinition; +import com.bytechef.component.definition.ComponentDsl; 
+import com.bytechef.component.definition.Parameters; +import com.bytechef.platform.component.definition.ai.agent.ModelFunction; +import org.springframework.ai.chat.model.ChatModel; + +/** + * @author Marko Kriskovic + */ +public class OpenRouterChatModel { + + public static final ClusterElementDefinition CLUSTER_ELEMENT_DEFINITION = + ComponentDsl.clusterElement("model") + .title("Open Router Model") + .description("Open Router model.") + .type(ModelFunction.MODEL) + .object(() -> OpenRouterChatModel::apply) + .properties( + SUPPORTED_PARAMETERS_PROPERTY, + CHAT_MODEL_PROPERTY, + FREQUENCY_PENALTY_PROPERTY + .displayCondition("contains(%s, 'frequency_penalty')".formatted(SUPPORTED_PARAMETERS)), + LOGIT_BIAS_PROPERTY + .displayCondition("contains(%s, 'logit_bias')".formatted(SUPPORTED_PARAMETERS)), + LOGPROBS_PROPERTY + .displayCondition("contains(%s, 'logprobs')".formatted(SUPPORTED_PARAMETERS)), + MAX_COMPLETION_TOKENS_PROPERTY + .displayCondition("contains(%s, 'max_completion_tokens')".formatted(SUPPORTED_PARAMETERS)), + MAX_TOKENS_PROPERTY + .displayCondition("contains(%s, 'max_tokens')".formatted(SUPPORTED_PARAMETERS)), + PRESENCE_PENALTY_PROPERTY + .displayCondition("contains(%s, 'presence_penalty')".formatted(SUPPORTED_PARAMETERS)), + REASONING_PROPERTY + .displayCondition("contains(%s, 'reasoning')".formatted(SUPPORTED_PARAMETERS)), + SEED_PROPERTY + .displayCondition("contains(%s, 'seed')".formatted(SUPPORTED_PARAMETERS)), + STOP_PROPERTY + .displayCondition("contains(%s, 'stop')".formatted(SUPPORTED_PARAMETERS)), + TEMPERATURE_PROPERTY + .displayCondition("contains(%s, 'temperature')".formatted(SUPPORTED_PARAMETERS)), + TOP_LOGPROBS_PROPERTY + .displayCondition("contains(%s, 'top_logprobs')".formatted(SUPPORTED_PARAMETERS)), + TOP_K_PROPERTY + .displayCondition("contains(%s, 'top_k')".formatted(SUPPORTED_PARAMETERS)), + TOP_P_PROPERTY + .displayCondition("contains(%s, 'top_p')".formatted(SUPPORTED_PARAMETERS)), + VERBOSITY_PROPERTY + 
.displayCondition("contains(%s, 'verbosity')".formatted(SUPPORTED_PARAMETERS)), + USER_PROPERTY); + + protected static ChatModel apply( + Parameters inputParameters, Parameters connectionParameters, boolean responseFormatRequired) { + + return CHAT_MODEL.createChatModel(inputParameters, connectionParameters, responseFormatRequired); + } +} diff --git a/server/libs/modules/components/ai/llm/hugging-face/src/main/java/com/bytechef/component/ai/llm/hugging/face/connection/HuggingFaceConnection.java b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/connection/OpenRouterConnection.java similarity index 84% rename from server/libs/modules/components/ai/llm/hugging-face/src/main/java/com/bytechef/component/ai/llm/hugging/face/connection/HuggingFaceConnection.java rename to server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/connection/OpenRouterConnection.java index 4c899ba4cde..759a69eb1cf 100644 --- a/server/libs/modules/components/ai/llm/hugging-face/src/main/java/com/bytechef/component/ai/llm/hugging/face/connection/HuggingFaceConnection.java +++ b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/connection/OpenRouterConnection.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.bytechef.component.ai.llm.hugging.face.connection; +package com.bytechef.component.ai.llm.open.router.connection; import static com.bytechef.component.definition.Authorization.AuthorizationType.BEARER_TOKEN; import static com.bytechef.component.definition.Authorization.TOKEN; @@ -25,13 +25,12 @@ import com.bytechef.component.definition.ComponentDsl.ModifiableConnectionDefinition; /** - * @author Monika Domiter * @author Marko Kriskovic */ -public final class HuggingFaceConnection { +public final class OpenRouterConnection { public static final ModifiableConnectionDefinition CONNECTION_DEFINITION = connection() - .baseUri((connectionParameters, context) -> "https://api-inference.huggingface.co") + .baseUri((connectionParameters, context) -> "https://openrouter.ai/api/v1") .authorizations( authorization(BEARER_TOKEN) .title("Bearer Token") @@ -40,6 +39,6 @@ public final class HuggingFaceConnection { .label("Token") .required(true))); - private HuggingFaceConnection() { + private OpenRouterConnection() { } } diff --git a/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/constant/OpenRouterConstants.java b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/constant/OpenRouterConstants.java new file mode 100644 index 00000000000..b86d296435c --- /dev/null +++ b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/constant/OpenRouterConstants.java @@ -0,0 +1,100 @@ +/* + * Copyright 2025 ByteChef + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.bytechef.component.ai.llm.open.router.constant; + +import static com.bytechef.component.ai.llm.constant.LLMConstants.MODEL; +import static com.bytechef.component.ai.llm.open.router.util.OpenRouterUtils.getOpenRouterModels; +import static com.bytechef.component.definition.ComponentDsl.array; +import static com.bytechef.component.definition.ComponentDsl.bool; +import static com.bytechef.component.definition.ComponentDsl.integer; +import static com.bytechef.component.definition.ComponentDsl.option; +import static com.bytechef.component.definition.ComponentDsl.string; + +import com.bytechef.component.definition.ComponentDsl; +import com.bytechef.component.definition.ComponentDsl.ModifiableBooleanProperty; +import com.bytechef.component.definition.ComponentDsl.ModifiableIntegerProperty; +import com.bytechef.component.definition.Option; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +/** + * @author Marko Kriskovic + */ +public class OpenRouterConstants { + + public static final String LOGPROBS = "logprobs"; + public static final String MAX_COMPLETION_TOKENS = "maxCompletionTokens"; + public static final String SUPPORTED_PARAMETERS = "supportedParameters"; + public static final String TOP_LOGPROBS = "topLogprobs"; + + public static final ModifiableBooleanProperty LOGPROBS_PROPERTY = bool(LOGPROBS) + .label("Logprobs") + .description("Return log probabilities.") + .required(false); + + public static final ModifiableIntegerProperty MAX_COMPLETION_TOKENS_PROPERTY = integer(MAX_COMPLETION_TOKENS) 
+ .label("Max Completion Tokens") + .description("Maximum tokens in completion.") + .required(false); + + public static final ModifiableIntegerProperty TOP_LOGPROBS_PROPERTY = integer(TOP_LOGPROBS) + .label("Top Logprobs") + .description("Number of top log probabilities to return (0-20).") + .minValue(0) + .maxValue(20) + .required(false); + + public static final ComponentDsl.ModifiableStringProperty CHAT_MODEL_PROPERTY = string(MODEL) + .label("Model") + .description("ID of the model to use.") + .options(getOpenRouterModels()) + .optionsLookupDependsOn(SUPPORTED_PARAMETERS) + .required(true); + + public static final ComponentDsl.ModifiableArrayProperty SUPPORTED_PARAMETERS_PROPERTY = array(SUPPORTED_PARAMETERS) + .label("Supported parameters") + .description("Filter models by supported parameter") + .items(string()) + .options(getSupportedParametersOptions()) + .defaultValue("response_format") + .required(true); + + private OpenRouterConstants() { + } + + private static String[] getSupportedParametersString() { + return new String[] { + "frequency_penalty", + "include_reasoning", + "logit_bias", "logprobs", + "max_completion_tokens", "max_tokens", "min_p", + "parallel_tool_calls", "presence_penalty", + "reasoning", "reasoning_effort", "response_format", "repetition_penalty", + "seed", "stop", "structured_outputs", + "temperature", "tools", "tool_choice", "top_a", "top_k", "top_p", "top_logprobs", + "verbosity", + "web_search_options" + }; + } + + private static List> getSupportedParametersOptions() { + return Arrays.stream(getSupportedParametersString()) + .map(param -> option(param, param)) + .collect(Collectors.toList()); + } +} diff --git a/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/model/OpenRouterChatModel.java b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/model/OpenRouterChatModel.java new file mode 100644 index 
00000000000..10f48d17961 --- /dev/null +++ b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/model/OpenRouterChatModel.java @@ -0,0 +1,352 @@ +/* + * Copyright 2025 ByteChef + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.bytechef.component.ai.llm.open.router.model; + +import com.bytechef.component.ai.llm.util.ModelUtils; +import java.util.ArrayList; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.springframework.ai.chat.messages.AssistantMessage; +import org.springframework.ai.chat.messages.Message; +import org.springframework.ai.chat.messages.MessageType; +import org.springframework.ai.chat.messages.UserMessage; +import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.chat.model.Generation; +import org.springframework.ai.chat.prompt.Prompt; +import org.springframework.ai.content.Media; +import org.springframework.core.ParameterizedTypeReference; +import org.springframework.http.HttpHeaders; +import org.springframework.http.MediaType; +import org.springframework.web.client.RestClient; + +/** + * @author Marko Kriskovic + */ +public class OpenRouterChatModel implements org.springframework.ai.chat.model.ChatModel { + + private static final String BASE_URL = "https://openrouter.ai/api/v1"; + + private final RestClient restClient; + private final String model; + private final Double 
frequencyPenalty; + private final Map logitBias; + private final Boolean logprobs; + private final Integer maxCompletionTokens; + private final Integer maxTokens; + private final Double presencePenalty; + private final String reasoning; + private final boolean jsonResponseFormat; + private final Integer seed; + private final List stop; + private final Double temperature; + private final Double topK; + private final Integer topLogprobs; + private final Double topP; + private final String user; + private final String verbosity; + + private OpenRouterChatModel(Builder builder) { + this.restClient = ModelUtils.getRestClientBuilder() + .baseUrl(BASE_URL) + .defaultHeader(HttpHeaders.AUTHORIZATION, "Bearer " + builder.apiKey) + .build(); + this.model = builder.model; + this.frequencyPenalty = builder.frequencyPenalty; + this.logitBias = builder.logitBias; + this.logprobs = builder.logprobs; + this.maxCompletionTokens = builder.maxCompletionTokens; + this.maxTokens = builder.maxTokens; + this.presencePenalty = builder.presencePenalty; + this.reasoning = builder.reasoning; + this.jsonResponseFormat = builder.jsonResponseFormat; + this.seed = builder.seed; + this.stop = builder.stop; + this.temperature = builder.temperature; + this.topK = builder.topK; + this.topLogprobs = builder.topLogprobs; + this.topP = builder.topP; + this.user = builder.user; + this.verbosity = builder.verbosity; + } + + public static Builder builder() { + return new Builder(); + } + + @Override + public ChatResponse call(Prompt prompt) { + List> messages = buildMessages(prompt.getInstructions()); + Map body = buildRequestBody(messages); + + Map response = restClient.post() + .uri("/chat/completions") + .contentType(MediaType.APPLICATION_JSON) + .body(body) + .retrieve() + .body(new ParameterizedTypeReference<>() {}); + + return buildChatResponse(response); + } + + private List> buildMessages(List messages) { + List> result = new ArrayList<>(); + + for (Message message : messages) { + MessageType 
messageType = message.getMessageType(); + + if (messageType == MessageType.USER) { + UserMessage userMessage = (UserMessage) message; + List media = userMessage.getMedia(); + + if (media == null || media.isEmpty()) { + result.add(Map.of("role", "user", "content", message.getText())); + } else { + result.add(buildUserMessageWithMedia(message.getText(), media)); + } + } else if (messageType == MessageType.SYSTEM) { + result.add(Map.of("role", "system", "content", message.getText())); + } else if (messageType == MessageType.ASSISTANT) { + result.add(Map.of("role", "assistant", "content", message.getText())); + } + } + + return result; + } + + private Map buildUserMessageWithMedia(String text, List media) { + List> contentParts = new ArrayList<>(); + StringBuilder textBuilder = new StringBuilder(text); + + for (Media attachment : media) { + String mimeType = attachment.getMimeType() + .toString(); + + if (mimeType.startsWith("image/")) { + byte[] bytes = attachment.getDataAsByteArray(); + String base64 = Base64.getEncoder() + .encodeToString(bytes); + + contentParts.add(Map.of( + "type", "image_url", + "image_url", Map.of("url", "data:" + mimeType + ";base64," + base64))); + } + } + + contentParts.addFirst(Map.of("type", "text", "text", textBuilder.toString())); + + return Map.of("role", "user", "content", contentParts); + } + + private Map buildRequestBody(List> messages) { + Map body = new HashMap<>(); + + body.put("model", model); + body.put("messages", messages); + + if (frequencyPenalty != null) { + body.put("frequency_penalty", frequencyPenalty); + } + + if (logitBias != null) { + body.put("logit_bias", logitBias); + } + + if (logprobs != null) { + body.put("logprobs", logprobs); + } + + if (maxCompletionTokens != null) { + body.put("max_completion_tokens", maxCompletionTokens); + } + + if (maxTokens != null) { + body.put("max_tokens", maxTokens); + } + + if (presencePenalty != null) { + body.put("presence_penalty", presencePenalty); + } + + if (reasoning != 
null) { + body.put("reasoning", reasoning); + } + + if (seed != null) { + body.put("seed", seed); + } + + if (stop != null && !stop.isEmpty()) { + body.put("stop", stop); + } + + if (temperature != null) { + body.put("temperature", temperature); + } + + if (topK != null) { + body.put("top_k", topK); + } + + if (topP != null) { + body.put("top_p", topP); + } + + if (topLogprobs != null) { + body.put("top_logprobs", topLogprobs); + } + + if (user != null) { + body.put("user", user); + } + + if (verbosity != null) { + body.put("verbosity", verbosity); + } + + body.put("response_format", Map.of("type", jsonResponseFormat ? "json_object" : "text")); + + return body; + } + + @SuppressWarnings("unchecked") + private ChatResponse buildChatResponse(Map response) { + List> choices = (List>) response.get("choices"); + Map message = (Map) choices.getFirst() + .get("message"); + String content = (String) message.get("content"); + + return new ChatResponse(List.of(new Generation(new AssistantMessage(content)))); + } + + public static class Builder { + + private String apiKey; + private String model; + private Double frequencyPenalty; + private Map logitBias; + private Boolean logprobs; + private Integer maxCompletionTokens; + private Integer maxTokens; + private Double presencePenalty; + private String reasoning; + private boolean jsonResponseFormat; + private Integer seed; + private List stop; + private Double temperature; + private Double topK; + private Integer topLogprobs; + private Double topP; + private String user; + private String verbosity; + + public Builder apiKey(String apiKey) { + this.apiKey = apiKey; + return this; + } + + public Builder frequencyPenalty(Double frequencyPenalty) { + this.frequencyPenalty = frequencyPenalty; + return this; + } + + public Builder jsonResponseFormat(boolean jsonResponseFormat) { + this.jsonResponseFormat = jsonResponseFormat; + return this; + } + + public Builder logitBias(Map logitBias) { + this.logitBias = logitBias == null ? 
null : new HashMap<>(logitBias); + return this; + } + + public Builder logprobs(Boolean logprobs) { + this.logprobs = logprobs; + return this; + } + + public Builder maxCompletionTokens(Integer maxCompletionTokens) { + this.maxCompletionTokens = maxCompletionTokens; + return this; + } + + public Builder maxTokens(Integer maxTokens) { + this.maxTokens = maxTokens; + return this; + } + + public Builder model(String model) { + this.model = model; + return this; + } + + public Builder presencePenalty(Double presencePenalty) { + this.presencePenalty = presencePenalty; + return this; + } + + public Builder reasoning(String reasoning) { + this.reasoning = reasoning; + return this; + } + + public Builder seed(Integer seed) { + this.seed = seed; + return this; + } + + public Builder stop(List stop) { + this.stop = stop == null ? null : new ArrayList<>(stop); + return this; + } + + public Builder temperature(Double temperature) { + this.temperature = temperature; + return this; + } + + public Builder topK(Double topK) { + this.topK = topK; + return this; + } + + public Builder topLogprobs(Integer topLogprobs) { + this.topLogprobs = topLogprobs; + return this; + } + + public Builder topP(Double topP) { + this.topP = topP; + return this; + } + + public Builder user(String user) { + this.user = user; + return this; + } + + public Builder verbosity(String verbosity) { + this.verbosity = verbosity; + return this; + } + + public OpenRouterChatModel build() { + return new OpenRouterChatModel(this); + } + } +} diff --git a/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/util/OpenRouterUtils.java b/server/libs/modules/components/ai/llm/open-router/src/main/java/com/bytechef/component/ai/llm/open/router/util/OpenRouterUtils.java new file mode 100644 index 00000000000..7d81410b45e --- /dev/null +++ 
/*
 * Copyright 2025 ByteChef
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.bytechef.component.ai.llm.open.router.util;

import static com.bytechef.component.ai.llm.open.router.constant.OpenRouterConstants.SUPPORTED_PARAMETERS;
import static com.bytechef.component.definition.ComponentDsl.option;

import com.bytechef.component.definition.ActionDefinition;
import com.bytechef.component.definition.Context;
import com.bytechef.component.definition.TypeReference;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Utility methods for the OpenRouter component.
 *
 * @author Marko Kriskovic
 */
public class OpenRouterUtils {

    private OpenRouterUtils() {
        // utility class — no instances
    }

    /**
     * Returns an options function that lists the OpenRouter models available to the
     * connection, optionally filtered by the supported parameters chosen in the
     * action, sorted by completion price (cheapest first) and then by name.
     */
    public static ActionDefinition.OptionsFunction getOpenRouterModels() {
        return (inputParameters, connectionParameters, lookupDependsOnPaths, searchText, context) -> {
            List<String> supportedParameters = inputParameters.getList(SUPPORTED_PARAMETERS, String.class);
            String supportedParametersString = supportedParameters != null && !supportedParameters.isEmpty()
                ? String.join(",", supportedParameters)
                : null;

            Context.Http.Executor executor = context.http(http -> http.get("/models"))
                .configuration(Context.Http.responseType(Context.Http.ResponseType.JSON))
                .queryParameter("output_modalities", "text");

            if (supportedParametersString != null) {
                executor = executor.queryParameter("supported_parameters", supportedParametersString);
            }

            ModelsResponse response = executor.execute()
                .getBody(new TypeReference<>() {});

            // NOTE(review): ">= 0" keeps every model with a parseable, non-negative
            // completion price; if the intent was to exclude free models, use "> 0".
            // Double.parseDouble will throw if a model reports no completion price —
            // confirm the API guarantees the field.
            return response.data()
                .stream()
                .filter(model -> Double.parseDouble(model.pricing()
                    .completion()) >= 0)
                .sorted(Comparator.comparingDouble((OpenRouterModel model) -> Double.parseDouble(model.pricing()
                    .completion()))
                    .thenComparing(OpenRouterModel::name))
                .map(model -> option(model.name() + " - $" + model.pricing()
                    .completion(), model.id(), model.description()))
                .collect(Collectors.toList());
        };
    }

    // Record component names mirror the OpenRouter /models JSON payload, including
    // the snake_case "supported_parameters" field.
    private record Pricing(String completion, String prompt, String request) {
    }

    private record OpenRouterModel(
        String name, String id, String description, Pricing pricing, List<String> supported_parameters) {
    }

    private record ModelsResponse(List<OpenRouterModel> data) {
    }
}
server/libs/modules/components/ai/llm/hugging-face/src/test/java/com/bytechef/component/ai/llm/hugging/face/HuggingFaceComponentHandlerTest.java rename to server/libs/modules/components/ai/llm/open-router/src/test/java/com/bytechef/component/ai/llm/open/router/OpenRouterComponentHandlerTest.java index 6b1c8694c88..0acb2ce24de 100644 --- a/server/libs/modules/components/ai/llm/hugging-face/src/test/java/com/bytechef/component/ai/llm/hugging/face/HuggingFaceComponentHandlerTest.java +++ b/server/libs/modules/components/ai/llm/open-router/src/test/java/com/bytechef/component/ai/llm/open/router/OpenRouterComponentHandlerTest.java @@ -14,19 +14,18 @@ * limitations under the License. */ -package com.bytechef.component.ai.llm.hugging.face; +package com.bytechef.component.ai.llm.open.router; import com.bytechef.test.jsonasssert.JsonFileAssert; import org.junit.jupiter.api.Test; /** - * @author Monika Domiter + * @author Marko Kriskovic */ -class HuggingFaceComponentHandlerTest { +class OpenRouterComponentHandlerTest { @Test void testGetComponentDefinition() { - JsonFileAssert.assertEquals( - "definition/hugging-face_v1.json", new HuggingFaceComponentHandler().getDefinition()); + JsonFileAssert.assertEquals("definition/oper-router_v1.json", new OpenRouterComponentHandler().getDefinition()); } } diff --git a/server/libs/modules/components/ai/llm/open-router/src/test/resources/definition/oper-router_v1.json b/server/libs/modules/components/ai/llm/open-router/src/test/resources/definition/oper-router_v1.json new file mode 100644 index 00000000000..4460ff6e048 --- /dev/null +++ b/server/libs/modules/components/ai/llm/open-router/src/test/resources/definition/oper-router_v1.json @@ -0,0 +1,1701 @@ +{ + "actions": [ { + "batch": null, + "beforeResume": null, + "beforeSuspend": null, + "beforeTimeoutResume": null, + "deprecated": null, + "description": "Ask anything you want.", + "help": null, + "metadata": null, + "name": "ask", + "outputDefinition": { + "output": { }, + 
"outputResponse": null, + "outputSchema": null, + "sampleOutput": null + }, + "perform": { }, + "processErrorResponse": null, + "properties": [ { + "advancedOption": null, + "controlType": "MULTI_SELECT", + "defaultValue": [ "response_format" ], + "description": "Filter models by supported parameter", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "items": [ { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": null, + "type": "STRING" + } ], + "label": "Supported parameters", + "maxItems": null, + "metadata": { }, + "minItems": null, + "multipleValues": null, + "name": "supportedParameters", + "options": [ { + "description": null, + "label": "frequency_penalty", + "value": "frequency_penalty" + }, { + "description": null, + "label": "include_reasoning", + "value": "include_reasoning" + }, { + "description": null, + "label": "logit_bias", + "value": "logit_bias" + }, { + "description": null, + "label": "logprobs", + "value": "logprobs" + }, { + "description": null, + "label": "max_completion_tokens", + "value": "max_completion_tokens" + }, { + "description": null, + "label": "max_tokens", + "value": "max_tokens" + }, { + "description": null, + "label": "min_p", + "value": "min_p" + }, { + "description": null, + "label": "parallel_tool_calls", + "value": "parallel_tool_calls" + }, { + "description": null, + "label": "presence_penalty", + "value": "presence_penalty" + }, { + "description": null, + "label": "reasoning", + "value": "reasoning" + }, { + "description": null, + "label": "reasoning_effort", + "value": "reasoning_effort" + }, { + "description": 
null, + "label": "response_format", + "value": "response_format" + }, { + "description": null, + "label": "repetition_penalty", + "value": "repetition_penalty" + }, { + "description": null, + "label": "seed", + "value": "seed" + }, { + "description": null, + "label": "stop", + "value": "stop" + }, { + "description": null, + "label": "structured_outputs", + "value": "structured_outputs" + }, { + "description": null, + "label": "temperature", + "value": "temperature" + }, { + "description": null, + "label": "tools", + "value": "tools" + }, { + "description": null, + "label": "tool_choice", + "value": "tool_choice" + }, { + "description": null, + "label": "top_a", + "value": "top_a" + }, { + "description": null, + "label": "top_k", + "value": "top_k" + }, { + "description": null, + "label": "top_p", + "value": "top_p" + }, { + "description": null, + "label": "top_logprobs", + "value": "top_logprobs" + }, { + "description": null, + "label": "verbosity", + "value": "verbosity" + }, { + "description": null, + "label": "web_search_options", + "value": "web_search_options" + } ], + "optionsDataSource": null, + "placeholder": null, + "required": true, + "type": "ARRAY" + }, { + "advancedOption": null, + "controlType": "SELECT", + "defaultValue": null, + "description": "ID of the model to use.", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Model", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "model", + "options": null, + "optionsDataSource": { + "options": { }, + "optionsLookupDependsOn": [ "supportedParameters" ] + }, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "TEXT_AREA", + "defaultValue": null, + "description": "User prompt to the model.", + "displayCondition": "format == 'SIMPLE'", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Prompt", + 
"languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "userPrompt", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "SELECT", + "defaultValue": "SIMPLE", + "description": "Format of providing the prompt to the model.", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Format", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "format", + "options": [ { + "description": "User prompt and optional system prompt.", + "label": "Simple", + "value": "SIMPLE" + }, { + "description": "Full control over the messages sent to the model.", + "label": "Advanced", + "value": "ADVANCED" + } ], + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "TEXT_AREA", + "defaultValue": null, + "description": "System prompt to the model.", + "displayCondition": "format == 'SIMPLE'", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "System Prompt", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "systemPrompt", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": false, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "ARRAY_BUILDER", + "defaultValue": null, + "description": "Only text and image files are supported. Also, only certain models supports images. 
Please check the documentation.", + "displayCondition": "format == 'SIMPLE'", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "items": [ { + "advancedOption": null, + "controlType": "FILE_ENTRY", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "metadata": { }, + "name": null, + "placeholder": null, + "properties": [ { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "extension", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "mimeType", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "name", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + 
"hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "url", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + } ], + "required": null, + "type": "FILE_ENTRY" + } ], + "label": "Attachments", + "maxItems": null, + "metadata": { }, + "minItems": null, + "multipleValues": null, + "name": "attachments", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": false, + "type": "ARRAY" + }, { + "advancedOption": null, + "controlType": "ARRAY_BUILDER", + "defaultValue": null, + "description": "A list of messages comprising the conversation so far.", + "displayCondition": "format == 'ADVANCED'", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "items": [ { + "additionalProperties": null, + "advancedOption": null, + "controlType": "OBJECT_BUILDER", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Message", + "metadata": { }, + "multipleValues": null, + "name": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "properties": [ { + "advancedOption": null, + "controlType": "SELECT", + "defaultValue": null, + "description": "The role of the messages author.", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Role", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "role", + "options": [ { + "description": null, + "label": "System", + "value": "SYSTEM" + }, { + "description": null, + "label": "User", + "value": "USER" + }, { + "description": null, + "label": "Assistant", + "value": "ASSISTANT" + } ], + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + 
"controlType": "TEXT_AREA", + "defaultValue": null, + "description": "The contents of the message.", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Content", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "content", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "ARRAY_BUILDER", + "defaultValue": null, + "description": "Only text and image files are supported. Also, only certain models supports images. Please check the documentation.", + "displayCondition": "messages[index].role == 'USER'", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "items": [ { + "advancedOption": null, + "controlType": "FILE_ENTRY", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "metadata": { }, + "name": null, + "placeholder": null, + "properties": [ { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "extension", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "mimeType", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": 
true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "name", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "url", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + } ], + "required": null, + "type": "FILE_ENTRY" + } ], + "label": "Attachments", + "maxItems": null, + "metadata": { }, + "minItems": null, + "multipleValues": null, + "name": "attachments", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": false, + "type": "ARRAY" + } ], + "required": null, + "type": "OBJECT" + } ], + "label": "Messages", + "maxItems": null, + "metadata": { }, + "minItems": null, + "multipleValues": null, + "name": "messages", + "options": null, + "optionsDataSource": null, + "placeholder": "Add message", + "required": true, + "type": "ARRAY" + }, { + "additionalProperties": null, + "advancedOption": null, + "controlType": "OBJECT_BUILDER", + "defaultValue": null, + "description": "The response from the API.", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Response", + "metadata": { }, + "multipleValues": null, + "name": "response", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "properties": [ { + "advancedOption": 
null, + "controlType": "SELECT", + "defaultValue": "TEXT", + "description": "In which format do you want the response to be in?", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Response Format", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "responseFormat", + "options": [ { + "description": "Response as text", + "label": "Text", + "value": "TEXT" + }, { + "description": "Response as data in a simple format using keys and values", + "label": "Structured data", + "value": "JSON" + } ], + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": null, + "controlType": "JSON_SCHEMA_BUILDER", + "defaultValue": null, + "description": "Define desired structure for the structured data response.", + "displayCondition": "response.responseFormat == 'JSON'", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Response Schema", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "responseSchema", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + } ], + "required": true, + "type": "OBJECT" + }, { + "advancedOption": true, + "controlType": "NUMBER", + "defaultValue": 0.0, + "description": "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.", + "displayCondition": "contains(supportedParameters, 'frequency_penalty')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Frequency Penalty", + "maxNumberPrecision": null, + "maxValue": 2.0, + "metadata": { }, + "minNumberPrecision": null, + "minValue": -2.0, + "name": "frequencyPenalty", + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + }, { + "additionalProperties": [ { + "advancedOption": null, + "controlType": "NUMBER", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "maxNumberPrecision": null, + "maxValue": null, + "metadata": { }, + "minNumberPrecision": null, + "minValue": null, + "name": null, + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + } ], + "advancedOption": true, + "controlType": "OBJECT_BUILDER", + "defaultValue": null, + "description": "Modify the likelihood of specified tokens appearing in the completion.", + "displayCondition": "contains(supportedParameters, 'logit_bias')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Logit Bias", + "metadata": { }, + "multipleValues": null, + "name": "logitBias", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "properties": null, + "required": null, + "type": "OBJECT" + }, { + "advancedOption": null, + "controlType": "SELECT", + "defaultValue": null, + "description": "Return log probabilities.", + "displayCondition": "contains(supportedParameters, 'logprobs')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Logprobs", + 
"metadata": { }, + "name": "logprobs", + "options": [ { + "description": null, + "label": "True", + "value": true + }, { + "description": null, + "label": "False", + "value": false + } ], + "placeholder": null, + "required": false, + "type": "BOOLEAN" + }, { + "advancedOption": null, + "controlType": "INTEGER", + "defaultValue": null, + "description": "Maximum tokens in completion.", + "displayCondition": "contains(supportedParameters, 'max_completion_tokens')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Max Completion Tokens", + "maxValue": null, + "metadata": { }, + "minValue": null, + "name": "maxCompletionTokens", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": false, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "INTEGER", + "defaultValue": null, + "description": "The maximum number of tokens to generate in the chat completion.", + "displayCondition": "contains(supportedParameters, 'max_tokens')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Max Tokens", + "maxValue": null, + "metadata": { }, + "minValue": null, + "name": "maxTokens", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "NUMBER", + "defaultValue": 0.0, + "description": "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.", + "displayCondition": "contains(supportedParameters, 'presence_penalty')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Presence Penalty", + "maxNumberPrecision": null, + "maxValue": 2.0, + "metadata": { }, + "minNumberPrecision": null, + "minValue": -2.0, + "name": "presencePenalty", + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + }, { + "advancedOption": true, + "controlType": "SELECT", + "defaultValue": null, + "description": "Constrains effort on reasoning. Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response. For reasoning models for gpt-5 and o-series models only.", + "displayCondition": "contains(supportedParameters, 'reasoning')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Reasoning effort", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "reasoning", + "options": [ { + "description": null, + "label": "none", + "value": "none" + }, { + "description": null, + "label": "minimal", + "value": "minimal" + }, { + "description": null, + "label": "low", + "value": "low" + }, { + "description": null, + "label": "medium", + "value": "medium" + }, { + "description": null, + "label": "high", + "value": "high" + }, { + "description": null, + "label": "maximal", + "value": "xhigh" + } ], + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": null, + "type": "STRING" + }, { + "advancedOption": true, + "controlType": "INTEGER", + "defaultValue": null, + "description": "Keeping the same seed would output the same response.", + "displayCondition": "contains(supportedParameters, 'seed')", + "exampleValue": null, + "expressionEnabled": null, + 
"hidden": null, + "label": "Seed", + "maxValue": null, + "metadata": { }, + "minValue": null, + "name": "seed", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "ARRAY_BUILDER", + "defaultValue": null, + "description": "Up to 4 sequences where the API will stop generating further tokens.", + "displayCondition": "contains(supportedParameters, 'stop')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "items": [ { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": null, + "type": "STRING" + } ], + "label": "Stop", + "maxItems": null, + "metadata": { }, + "minItems": null, + "multipleValues": null, + "name": "stop", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "ARRAY" + }, { + "advancedOption": true, + "controlType": "NUMBER", + "defaultValue": 1.0, + "description": "Controls randomness: Higher values will make the output more random, while lower values like will make it more focused and deterministic.", + "displayCondition": "contains(supportedParameters, 'temperature')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Temperature", + "maxNumberPrecision": null, + "maxValue": 2.0, + "metadata": { }, + "minNumberPrecision": null, + "minValue": 0.0, + "name": "temperature", + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + }, { + "advancedOption": null, + "controlType": "INTEGER", + "defaultValue": 
null, + "description": "Number of top log probabilities to return (0-20).", + "displayCondition": "contains(supportedParameters, 'top_logprobs')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Top Logprobs", + "maxValue": 20, + "metadata": { }, + "minValue": 0, + "name": "topLogprobs", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": false, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "INTEGER", + "defaultValue": 1, + "description": "Specify the number of token choices the generative uses to generate the next token.", + "displayCondition": "contains(supportedParameters, 'top_k')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Top K", + "maxValue": null, + "metadata": { }, + "minValue": null, + "name": "topK", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "NUMBER", + "defaultValue": 1.0, + "description": "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.", + "displayCondition": "contains(supportedParameters, 'top_p')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Top P", + "maxNumberPrecision": null, + "maxValue": null, + "metadata": { }, + "minNumberPrecision": null, + "minValue": null, + "name": "topP", + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + }, { + "advancedOption": true, + "controlType": "SELECT", + "defaultValue": null, + "description": "Adjusts response verbosity. 
Lower levels yield shorter answers.", + "displayCondition": "contains(supportedParameters, 'verbosity')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Verbosity", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "verbosity", + "options": [ { + "description": null, + "label": "low", + "value": "low" + }, { + "description": null, + "label": "medium", + "value": "medium" + }, { + "description": null, + "label": "high", + "value": "high" + } ], + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": null, + "type": "STRING" + }, { + "advancedOption": true, + "controlType": "TEXT", + "defaultValue": null, + "description": "A unique identifier representing your end-user, which can help admins to monitor and detect abuse.", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "User", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "user", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": false, + "type": "STRING" + } ], + "resumePerform": null, + "title": "Ask", + "workflowNodeDescription": null + } ], + "clusterElements": [ { + "name": "model", + "description": "Open Router model.", + "element": { }, + "help": null, + "outputDefinition": null, + "processErrorResponse": null, + "properties": [ { + "advancedOption": null, + "controlType": "MULTI_SELECT", + "defaultValue": [ "response_format" ], + "description": "Filter models by supported parameter", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "items": [ { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, 
+ "metadata": { }, + "minLength": null, + "name": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": null, + "type": "STRING" + } ], + "label": "Supported parameters", + "maxItems": null, + "metadata": { }, + "minItems": null, + "multipleValues": null, + "name": "supportedParameters", + "options": [ { + "description": null, + "label": "frequency_penalty", + "value": "frequency_penalty" + }, { + "description": null, + "label": "include_reasoning", + "value": "include_reasoning" + }, { + "description": null, + "label": "logit_bias", + "value": "logit_bias" + }, { + "description": null, + "label": "logprobs", + "value": "logprobs" + }, { + "description": null, + "label": "max_completion_tokens", + "value": "max_completion_tokens" + }, { + "description": null, + "label": "max_tokens", + "value": "max_tokens" + }, { + "description": null, + "label": "min_p", + "value": "min_p" + }, { + "description": null, + "label": "parallel_tool_calls", + "value": "parallel_tool_calls" + }, { + "description": null, + "label": "presence_penalty", + "value": "presence_penalty" + }, { + "description": null, + "label": "reasoning", + "value": "reasoning" + }, { + "description": null, + "label": "reasoning_effort", + "value": "reasoning_effort" + }, { + "description": null, + "label": "response_format", + "value": "response_format" + }, { + "description": null, + "label": "repetition_penalty", + "value": "repetition_penalty" + }, { + "description": null, + "label": "seed", + "value": "seed" + }, { + "description": null, + "label": "stop", + "value": "stop" + }, { + "description": null, + "label": "structured_outputs", + "value": "structured_outputs" + }, { + "description": null, + "label": "temperature", + "value": "temperature" + }, { + "description": null, + "label": "tools", + "value": "tools" + }, { + "description": null, + "label": "tool_choice", + "value": "tool_choice" + }, { + "description": null, + "label": "top_a", + 
"value": "top_a" + }, { + "description": null, + "label": "top_k", + "value": "top_k" + }, { + "description": null, + "label": "top_p", + "value": "top_p" + }, { + "description": null, + "label": "top_logprobs", + "value": "top_logprobs" + }, { + "description": null, + "label": "verbosity", + "value": "verbosity" + }, { + "description": null, + "label": "web_search_options", + "value": "web_search_options" + } ], + "optionsDataSource": null, + "placeholder": null, + "required": true, + "type": "ARRAY" + }, { + "advancedOption": null, + "controlType": "SELECT", + "defaultValue": null, + "description": "ID of the model to use.", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Model", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "model", + "options": null, + "optionsDataSource": { + "options": { }, + "optionsLookupDependsOn": [ "supportedParameters" ] + }, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + }, { + "advancedOption": true, + "controlType": "NUMBER", + "defaultValue": 0.0, + "description": "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.", + "displayCondition": "contains(supportedParameters, 'frequency_penalty')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Frequency Penalty", + "maxNumberPrecision": null, + "maxValue": 2.0, + "metadata": { }, + "minNumberPrecision": null, + "minValue": -2.0, + "name": "frequencyPenalty", + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + }, { + "additionalProperties": [ { + "advancedOption": null, + "controlType": "NUMBER", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "maxNumberPrecision": null, + "maxValue": null, + "metadata": { }, + "minNumberPrecision": null, + "minValue": null, + "name": null, + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + } ], + "advancedOption": true, + "controlType": "OBJECT_BUILDER", + "defaultValue": null, + "description": "Modify the likelihood of specified tokens appearing in the completion.", + "displayCondition": "contains(supportedParameters, 'logit_bias')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Logit Bias", + "metadata": { }, + "multipleValues": null, + "name": "logitBias", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "properties": null, + "required": null, + "type": "OBJECT" + }, { + "advancedOption": null, + "controlType": "SELECT", + "defaultValue": null, + "description": "Return log probabilities.", + "displayCondition": "contains(supportedParameters, 'logprobs')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Logprobs", + 
"metadata": { }, + "name": "logprobs", + "options": [ { + "description": null, + "label": "True", + "value": true + }, { + "description": null, + "label": "False", + "value": false + } ], + "placeholder": null, + "required": false, + "type": "BOOLEAN" + }, { + "advancedOption": null, + "controlType": "INTEGER", + "defaultValue": null, + "description": "Maximum tokens in completion.", + "displayCondition": "contains(supportedParameters, 'max_completion_tokens')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Max Completion Tokens", + "maxValue": null, + "metadata": { }, + "minValue": null, + "name": "maxCompletionTokens", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": false, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "INTEGER", + "defaultValue": null, + "description": "The maximum number of tokens to generate in the chat completion.", + "displayCondition": "contains(supportedParameters, 'max_tokens')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Max Tokens", + "maxValue": null, + "metadata": { }, + "minValue": null, + "name": "maxTokens", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "NUMBER", + "defaultValue": 0.0, + "description": "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.", + "displayCondition": "contains(supportedParameters, 'presence_penalty')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Presence Penalty", + "maxNumberPrecision": null, + "maxValue": 2.0, + "metadata": { }, + "minNumberPrecision": null, + "minValue": -2.0, + "name": "presencePenalty", + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + }, { + "advancedOption": true, + "controlType": "SELECT", + "defaultValue": null, + "description": "Constrains effort on reasoning. Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response. For reasoning models for gpt-5 and o-series models only.", + "displayCondition": "contains(supportedParameters, 'reasoning')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Reasoning effort", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "reasoning", + "options": [ { + "description": null, + "label": "none", + "value": "none" + }, { + "description": null, + "label": "minimal", + "value": "minimal" + }, { + "description": null, + "label": "low", + "value": "low" + }, { + "description": null, + "label": "medium", + "value": "medium" + }, { + "description": null, + "label": "high", + "value": "high" + }, { + "description": null, + "label": "maximal", + "value": "xhigh" + } ], + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": null, + "type": "STRING" + }, { + "advancedOption": true, + "controlType": "INTEGER", + "defaultValue": null, + "description": "Keeping the same seed would output the same response.", + "displayCondition": "contains(supportedParameters, 'seed')", + "exampleValue": null, + "expressionEnabled": null, + 
"hidden": null, + "label": "Seed", + "maxValue": null, + "metadata": { }, + "minValue": null, + "name": "seed", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "ARRAY_BUILDER", + "defaultValue": null, + "description": "Up to 4 sequences where the API will stop generating further tokens.", + "displayCondition": "contains(supportedParameters, 'stop')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "items": [ { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": null, + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": null, + "type": "STRING" + } ], + "label": "Stop", + "maxItems": null, + "metadata": { }, + "minItems": null, + "multipleValues": null, + "name": "stop", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "ARRAY" + }, { + "advancedOption": true, + "controlType": "NUMBER", + "defaultValue": 1.0, + "description": "Controls randomness: Higher values will make the output more random, while lower values like will make it more focused and deterministic.", + "displayCondition": "contains(supportedParameters, 'temperature')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Temperature", + "maxNumberPrecision": null, + "maxValue": 2.0, + "metadata": { }, + "minNumberPrecision": null, + "minValue": 0.0, + "name": "temperature", + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + }, { + "advancedOption": null, + "controlType": "INTEGER", + "defaultValue": 
null, + "description": "Number of top log probabilities to return (0-20).", + "displayCondition": "contains(supportedParameters, 'top_logprobs')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Top Logprobs", + "maxValue": 20, + "metadata": { }, + "minValue": 0, + "name": "topLogprobs", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": false, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "INTEGER", + "defaultValue": 1, + "description": "Specify the number of token choices the generative uses to generate the next token.", + "displayCondition": "contains(supportedParameters, 'top_k')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Top K", + "maxValue": null, + "metadata": { }, + "minValue": null, + "name": "topK", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "INTEGER" + }, { + "advancedOption": true, + "controlType": "NUMBER", + "defaultValue": 1.0, + "description": "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.", + "displayCondition": "contains(supportedParameters, 'top_p')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Top P", + "maxNumberPrecision": null, + "maxValue": null, + "metadata": { }, + "minNumberPrecision": null, + "minValue": null, + "name": "topP", + "numberPrecision": null, + "options": null, + "optionsDataSource": null, + "placeholder": null, + "required": null, + "type": "NUMBER" + }, { + "advancedOption": true, + "controlType": "SELECT", + "defaultValue": null, + "description": "Adjusts response verbosity. 
Lower levels yield shorter answers.", + "displayCondition": "contains(supportedParameters, 'verbosity')", + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Verbosity", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "verbosity", + "options": [ { + "description": null, + "label": "low", + "value": "low" + }, { + "description": null, + "label": "medium", + "value": "medium" + }, { + "description": null, + "label": "high", + "value": "high" + } ], + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": null, + "type": "STRING" + }, { + "advancedOption": true, + "controlType": "TEXT", + "defaultValue": null, + "description": "A unique identifier representing your end-user, which can help admins to monitor and detect abuse.", + "displayCondition": null, + "exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "User", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "user", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": false, + "type": "STRING" + } ], + "title": "Open Router Model", + "type": { + "name": "MODEL", + "key": "model", + "label": "Model", + "multipleElements": false, + "required": true + }, + "workflowNodeDescription": null + } ], + "componentCategories": [ { + "name": "artificial-intelligence", + "label": "Artificial Intelligence" + } ], + "connection": { + "authorizationRequired": null, + "authorizations": [ { + "acquire": null, + "apply": null, + "authorizationCallback": null, + "authorizationUrl": null, + "clientId": null, + "clientSecret": null, + "description": null, + "detectOn": null, + "name": "bearer_token", + "oauth2AuthorizationExtraQueryParameters": null, + "pkce": null, + "properties": [ { + "advancedOption": null, + "controlType": "TEXT", + "defaultValue": null, + "description": null, + "displayCondition": null, + 
"exampleValue": null, + "expressionEnabled": null, + "hidden": null, + "label": "Token", + "languageId": null, + "maxLength": null, + "metadata": { }, + "minLength": null, + "name": "token", + "options": null, + "optionsDataSource": null, + "placeholder": null, + "regex": null, + "required": true, + "type": "STRING" + } ], + "refresh": null, + "refreshOn": null, + "refreshToken": null, + "refreshUrl": null, + "scopes": null, + "title": "Bearer Token", + "tokenUrl": null, + "type": "BEARER_TOKEN" + } ], + "baseUri": { }, + "help": null, + "processErrorResponse": null, + "properties": null, + "test": null, + "version": 1 + }, + "customAction": null, + "customActionHelp": null, + "description": "OpenRouter provides a unified API that gives you access to hundreds of AI models through a single endpoint, while automatically handling fallbacks and selecting the most cost-effective options.", + "icon": "path:assets/open-router.svg", + "metadata": null, + "name": "openRouter", + "resources": null, + "tags": null, + "title": "Open Router", + "triggers": null, + "unifiedApi": null, + "version": 1 +} \ No newline at end of file diff --git a/server/libs/modules/components/ai/universal/universal-text/build.gradle.kts b/server/libs/modules/components/ai/universal/universal-text/build.gradle.kts index cefe5286de5..764a84e4b46 100644 --- a/server/libs/modules/components/ai/universal/universal-text/build.gradle.kts +++ b/server/libs/modules/components/ai/universal/universal-text/build.gradle.kts @@ -14,6 +14,7 @@ dependencies { implementation(project(":server:libs:modules:components:ai:llm:mistral")) implementation(project(":server:libs:modules:components:ai:llm:nvidia")) implementation(project(":server:libs:modules:components:ai:llm:open-ai")) + implementation(project(":server:libs:modules:components:ai:llm:open-router")) implementation(project(":server:libs:modules:components:ai:llm:perplexity")) implementation(project(":server:libs:modules:components:ai:llm:vertex:gemini")) } 
diff --git a/settings.gradle.kts b/settings.gradle.kts index d782c335853..aa142012d2e 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -259,6 +259,7 @@ include("server:libs:modules:components:ai:llm:mistral") include("server:libs:modules:components:ai:llm:nvidia") include("server:libs:modules:components:ai:llm:ollama") include("server:libs:modules:components:ai:llm:open-ai") +include("server:libs:modules:components:ai:llm:open-router") include("server:libs:modules:components:ai:llm:perplexity") include("server:libs:modules:components:ai:llm:stability") include("server:libs:modules:components:ai:llm:vertex:gemini")