Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
dfbb9d6
feat: add task type configuration to LLM configs and AIGateway model …
Apr 8, 2026
0ffc6ec
feat: add task type configuration to LLM configs and AIGateway model …
Apr 2, 2026
3555968
Create unique uuid for namespace and return namespace for query user …
csg-pr-bot Apr 8, 2026
b3fedb3
aigateway: allow unauthenticated GET /v1/models (#959)
csg-pr-bot Apr 8, 2026
4c1f0e9
Fix mcp gateway session affinity, mcp server update issue and add in…
csg-pr-bot Apr 8, 2026
23991b8
Fix api/v1/spaces API with trending and sdk query bug (#961)
csg-pr-bot Apr 8, 2026
ecd237a
Fix skill upload bug when zip contains .git dir (#962)
csg-pr-bot Apr 8, 2026
6f01dd0
feat(aigateway): don't skip any balance check (#963)
csg-pr-bot Apr 8, 2026
e532004
Refactor check accounting uuid by namespace uuid for user or org (#965)
csg-pr-bot Apr 8, 2026
df6cc8e
use model repo_path as model id when invoking internal llm endpoint (…
csg-pr-bot Apr 9, 2026
ede0e44
fix: 修复组织充值报告中订单用户名显示错误问题 (#967)
csg-pr-bot Apr 9, 2026
bdd0c5a
update(aigateway): refresh user balance cache after recharge or consu…
csg-pr-bot Apr 9, 2026
8aa1265
Fix org space cannot deploy to become running (#969)
csg-pr-bot Apr 9, 2026
19eda42
update ut for check balance (#970)
csg-pr-bot Apr 9, 2026
9655b34
add resolved version to gitlab issue feedback email (#971)
csg-pr-bot Apr 9, 2026
9dd1416
Remove black space for prompt (#972)
csg-pr-bot Apr 9, 2026
5d96a05
feat: add guard-llm provider for sensitive check (#973)
csg-pr-bot Apr 9, 2026
d9fc62b
merge main
Apr 9, 2026
5b452a5
fix ut
Apr 9, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
152 changes: 98 additions & 54 deletions aigateway/component/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import (
"errors"
"fmt"
"log/slog"
"slices"
"strconv"
"strings"
"time"
Expand Down Expand Up @@ -67,6 +68,8 @@ func (m *openaiComponentImpl) GetAvailableModels(c context.Context, userName str
externalModels := m.getExternalModels(c)
models = append(models, externalModels...)

models = m.enrichModelsWithPrice(c, models)

// Save models to cache asynchronously
go func(modelList []types.Model) {
if len(modelList) == 0 {
Expand Down Expand Up @@ -103,56 +106,72 @@ func (m *openaiComponentImpl) ListModels(c context.Context, userName string, req
return filterAndPaginateModels(models, req), nil
}

func filterAndPaginateModels(models []types.Model, req types.ListModelsReq) types.ModelList {
// Apply fuzzy search filter if model_id is provided
searchQuery := req.ModelID
if searchQuery != "" {
filtered := make([]types.Model, 0, len(models))
sq := strings.ToLower(searchQuery)
for _, model := range models {
if strings.Contains(strings.ToLower(model.ID), sq) {
filtered = append(filtered, model)
}
}
models = filtered
type modelFilter func(m *types.Model) bool

// filterByModelID builds a modelFilter that keeps models whose ID contains
// query, compared case-insensitively. The caller is expected to pass query
// already lowercased (filterAndPaginateModels does so).
func filterByModelID(query string) modelFilter {
	return func(m *types.Model) bool {
		id := strings.ToLower(m.ID)
		return strings.Contains(id, query)
	}
}

// Apply public filter if provided and parseable
if req.Public != "" {
if isPublic, err := strconv.ParseBool(req.Public); err == nil {
filtered := make([]types.Model, 0, len(models))
for _, model := range models {
if model.Public == isPublic {
filtered = append(filtered, model)
}
}
models = filtered
// filterBySource builds a modelFilter that keeps models originating from the
// given source. A CSGHub model is identified by a non-empty CSGHubModelID, an
// external model by a non-empty Provider. Any unrecognized source value keeps
// every model (matching the pre-refactor behavior of including all).
func filterBySource(source string) modelFilter {
	return func(m *types.Model) bool {
		if source == string(types.ModelSourceCSGHub) {
			return m.CSGHubModelID != ""
		}
		if source == string(types.ModelSourceExternal) {
			return m.Provider != ""
		}
		// Unknown source value: do not filter anything out.
		return true
	}
}

// Apply source filter if provided
if req.Source != "" {
source := strings.ToLower(req.Source)
filtered := make([]types.Model, 0, len(models))
for _, model := range models {
switch source {
case string(types.ModelSourceCSGHub):
if model.CSGHubModelID != "" {
filtered = append(filtered, model)
}
case string(types.ModelSourceExternal):
if model.Provider != "" {
filtered = append(filtered, model)
}
default:
// Unknown source value, include all
filtered = append(filtered, model)
// filterByTask builds a modelFilter that keeps models whose comma-separated
// Task list contains the given task (case-insensitive). The caller is
// expected to pass task already lowercased (filterAndPaginateModels does so).
//
// Each entry is trimmed of surrounding whitespace before comparison, so a
// Task value of "chat, embedding" matches the task "embedding". The previous
// implementation split on bare commas only, which silently failed to match
// entries written with a space after the comma.
func filterByTask(task string) modelFilter {
	return func(m *types.Model) bool {
		for _, entry := range strings.Split(m.Task, ",") {
			if strings.TrimSpace(strings.ToLower(entry)) == task {
				return true
			}
		}
		return false
	}
}

// applyFilters returns the models that satisfy every filter in filters.
// With no filters the input slice is returned unchanged (no copy). Filters
// receive a pointer into the input slice to avoid copying each element per
// predicate call; kept elements are copied into a fresh result slice.
func applyFilters(models []types.Model, filters []modelFilter) []types.Model {
	if len(filters) == 0 {
		return models
	}

	kept := make([]types.Model, 0, len(models))
next:
	for i := range models {
		for _, matches := range filters {
			if !matches(&models[i]) {
				continue next
			}
		}
		kept = append(kept, models[i])
	}
	return kept
}

func filterAndPaginateModels(models []types.Model, req types.ListModelsReq) types.ModelList {
var filters []modelFilter

if searchQuery := strings.ToLower(req.ModelID); searchQuery != "" {
filters = append(filters, filterByModelID(searchQuery))
}
if source := strings.ToLower(req.Source); source != "" {
filters = append(filters, filterBySource(source))
}
if task := strings.ToLower(req.Task); task != "" {
filters = append(filters, filterByTask(task))
}

models = applyFilters(models, filters)

// Parse pagination parameters (defaults match previous handler behavior)
per := 20
page := 1
if req.Per != "" {
Expand All @@ -170,8 +189,7 @@ func filterAndPaginateModels(models []types.Model, req types.ListModelsReq) type
}

totalCount := len(models)
offset := (page - 1) * per
startIndex := offset
startIndex := (page - 1) * per
if startIndex > totalCount {
startIndex = totalCount
}
Expand All @@ -187,18 +205,29 @@ func filterAndPaginateModels(models []types.Model, req types.ListModelsReq) type
firstID = &paginated[0].ID
lastID = &paginated[len(paginated)-1].ID
}
hasMore := endIndex < totalCount

return types.ModelList{
Object: "list",
Data: paginated,
FirstID: firstID,
LastID: lastID,
HasMore: hasMore,
HasMore: endIndex < totalCount,
TotalCount: totalCount,
}
}

// providerTypeFromDeployType maps a deploy type integer to the LLM type
// string stored under MetaKeyLLMType. Serverless deploys map to the
// serverless provider type; inference deploys — and any unrecognized deploy
// type — map to the inference provider type (same fallback as before).
func providerTypeFromDeployType(t int) string {
	if t == commontypes.ServerlessType {
		return types.ProviderTypeServerless
	}
	// commontypes.InferenceType and unknown values both fall through here.
	return types.ProviderTypeInference
}

func (m *openaiComponentImpl) getCSGHubModels(c context.Context, userID int64) ([]types.Model, error) {
runningDeploys, err := m.deployStore.RunningVisibleToUser(c, userID)
if err != nil {
Expand All @@ -212,11 +241,6 @@ func (m *openaiComponentImpl) getCSGHubModels(c context.Context, userID int64) (
}
// Check if engine_args contains tool-call-parser parameter
supportFunctionCall := strings.Contains(deploy.EngineArgs, "tool-call-parser")
// Determine public/private based on deployment type, ownership and secure level.
isPublic := true
if deploy.Type == commontypes.InferenceType && deploy.SecureLevel == commontypes.EndpointPrivate && deploy.UserID == userID {
isPublic = false // private - user's own deployment with private secure level
}
repoName := deploy.Repository.Name
m := types.Model{
BaseModel: types.BaseModel{
Expand All @@ -225,7 +249,9 @@ func (m *openaiComponentImpl) getCSGHubModels(c context.Context, userID int64) (
SupportFunctionCall: supportFunctionCall,
Task: string(deploy.Task),
DisplayName: repoName,
Public: isPublic,
Metadata: map[string]any{
types.MetaKeyLLMType: providerTypeFromDeployType(deploy.Type),
},
},
InternalModelInfo: types.InternalModelInfo{
CSGHubModelID: deploy.Repository.Path,
Expand Down Expand Up @@ -266,6 +292,8 @@ func (m *openaiComponentImpl) getExternalModels(c context.Context) []types.Model
search := &commontypes.SearchLLMConfig{}
searchType := 16
search.Type = &searchType
enabled := true
search.Enabled = &enabled

per := 50
page := 1
Expand All @@ -278,15 +306,31 @@ func (m *openaiComponentImpl) getExternalModels(c context.Context) []types.Model
}

for _, extModel := range extModels {
// Extract tasks from metadata if present
task := ""
if extModel.Metadata != nil {
if tasks, ok := extModel.Metadata[types.MetaKeyTasks].([]any); ok && len(tasks) > 0 {
tasksStrings := make([]string, 0, len(tasks))
for _, t := range tasks {
if s, ok := t.(string); ok {
tasksStrings = append(tasksStrings, s)
}
}
task = strings.Join(tasksStrings, ",")
}
}
if extModel.Metadata == nil {
extModel.Metadata = map[string]any{}
}
extModel.Metadata[types.MetaKeyLLMType] = types.ProviderTypeExternalLLM
m := types.Model{
BaseModel: types.BaseModel{
Object: "model",
ID: extModel.ModelName,
OwnedBy: extModel.Provider,
DisplayName: extModel.DisplayName,
// Metadata is allowed to be nil; JSON will contain `null` for nil maps.
Metadata: extModel.Metadata,
Public: true, // external models are always public
Metadata: extModel.Metadata,
Task: task,
},
Endpoint: extModel.ApiEndpoint,
ExternalModelInfo: types.ExternalModelInfo{
Expand Down
4 changes: 4 additions & 0 deletions aigateway/component/openai_ce.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,3 +48,7 @@ func parseScene(sceneValue string) common_types.SceneType {
// CheckBalance always reports success: it unconditionally returns nil and
// performs no balance lookup. Presumably this is the community-edition
// (openai_ce.go) stand-in for an enterprise balance check — confirm against
// the non-CE implementation. ctx, username, and userUUID are ignored.
func (e *extendOpenai) CheckBalance(ctx context.Context, username, userUUID string) error {
	return nil
}

// enrichModelsWithPrice returns the model list unchanged — no pricing data is
// attached. Presumably price enrichment is an enterprise feature and this is
// the community-edition no-op satisfying the interface used by
// GetAvailableModels — confirm against the non-CE implementation.
func (e *extendOpenai) enrichModelsWithPrice(_ context.Context, models []types.Model) []types.Model {
	return models
}
Loading
Loading