|
3 | 3 | from autogen_ext.models.anthropic._model_info import _MODEL_INFO as anthropic_models |
4 | 4 | from autogen_ext.models.ollama._model_info import _MODEL_INFO as ollama_models |
5 | 5 | from autogen_ext.models.openai._model_info import _MODEL_INFO as openai_models |
| 6 | +from autogen_ext.models.openai._model_info import _MODEL_POINTERS |
6 | 7 | from fastapi import APIRouter |
7 | 8 |
|
# Router collecting the model-listing endpoints defined below.
router = APIRouter()
9 | 10 |
|
10 | 11 |
|
@router.get("/")
async def list_models() -> Dict[str, List[Dict[str, Any]]]:
    """List available models per provider with their function-calling support.

    Returns a mapping from provider key ("anthropic", "ollama", "openAI",
    "azureOpenAI") to a list of ``{"name": ..., "function_calling": ...}``
    entries. Alias names from ``_MODEL_POINTERS`` are folded into the
    provider whose model table contains the alias target: ``claude-*``
    aliases go to anthropic, all others to openAI. The "azureOpenAI" list
    mirrors "openAI", since both are backed by the same model-info table.
    """

    def _provider_entries(models: Dict[str, Any], claude_aliases: bool) -> List[Dict[str, Any]]:
        # Start with one entry per concrete model; keying by name lets an
        # alias overwrite/extend without producing duplicates.
        merged: Dict[str, Dict[str, Any]] = {
            name: {"function_calling": data["function_calling"]} for name, data in models.items()
        }
        # Fold in alias names that resolve to a model in this provider's table.
        # ``claude_aliases`` selects which half of _MODEL_POINTERS applies here.
        for alias, target in _MODEL_POINTERS.items():
            if alias.startswith("claude-") == claude_aliases and target in models:
                merged[alias] = {"function_calling": models[target]["function_calling"]}
        return [{"name": name, **props} for name, props in merged.items()]

    # openAI and azureOpenAI expose the same catalogue — build it once.
    response_openai = _provider_entries(openai_models, claude_aliases=False)

    return {
        "anthropic": _provider_entries(anthropic_models, claude_aliases=True),
        "ollama": [
            {"name": name, "function_calling": data["function_calling"]}
            for name, data in ollama_models.items()
        ],
        "openAI": response_openai,
        "azureOpenAI": response_openai,
    }
0 commit comments