22import json
33import re
44
# Package version; bumped for the 0.0.4 "genesis" release.
# The previous value carried a stray trailing space inside the string,
# which breaks exact version comparisons — removed here.
__version__ = "0.0.4_genesis"
66
77class clients :

    # ==============================
    # Client factories — one per provider, each bound to its base URL
    # ==============================

813 @staticmethod
914 def veniceai (api_key : str ) -> openai .OpenAI :
15+ """
16+ Use `clients.veniceai_request` for call
17+ """
1018 return openai .OpenAI (api_key = api_key , base_url = "https://api.venice.ai/api/v1" )
1119
1220 @staticmethod
1321 def deepseek (api_key : str ) -> openai .OpenAI :
22+ """
23+ Use `clients.generic_request` for call
24+ """
1425 return openai .OpenAI (api_key = api_key , base_url = "https://api.deepseek.com" )
1526
1627 @staticmethod
1728 def openrouter (api_key : str ) -> openai .OpenAI :
29+ """
30+ Use `clients.openrouter_request` for call
31+ """
1832 return openai .OpenAI (api_key = api_key , base_url = "https://openrouter.ai/api/v1" )
1933
34+ @staticmethod
35+ def xai (api_key : str ) -> openai .OpenAI :
36+ """
37+ Use `clients.generic_request` for call
38+ """
39+ return openai .OpenAI (api_key = api_key , base_url = "https://api.x.ai/v1" )
40+
41+ @staticmethod
42+ def groq (api_key : str ) -> openai .OpenAI :
43+ """
44+ Use `clients.generic_request` for call
45+ """
46+ return openai .OpenAI (api_key = api_key , base_url = "https://api.groq.com/openai/v1" )
47+
48+
    # ==============================
    # Request helpers — one per provider family, carrying provider-specific options
    #
    # e.g. "include_venice_system_prompt" for Venice.ai, or the custom app
    # attribution headers required by OpenRouter
    # ==============================

2055 @staticmethod
2156 def veniceai_request (client : openai .OpenAI , messages : list [dict ], model :str = "defaut" , temperature :float = 0.7 , max_tokens :int = 4096 , tools :list [dict ]= None , include_venice_system_prompt :bool = False , ** kwargs ) -> openai .Stream :
2257 base_params = {
@@ -48,9 +83,9 @@ def veniceai_request(client: openai.OpenAI, messages: list[dict], model:str="def
4883 params = {** base_params , ** tool_params , ** venice_params }
4984
5085 return client .chat .completions .create (** params )
51-
86+
5287 @staticmethod
53- def generic_request (client : openai .OpenAI , messages : list [dict ], model :str = "defaut" , temperature :float = 0.7 , max_tokens :int = 4096 , tools :list [dict ]= None , ** kwargs ) -> openai .Stream :
88+ def openrouter_request (client : openai .OpenAI , messages : list [dict ], model :str = "defaut" , temperature :float = 0.7 , max_tokens :int = 4096 , tools :list [dict ]= None , ** kwargs ) -> openai .Stream :
5489 base_params = {
5590 "model" : model ,
5691 "messages" : messages ,
@@ -69,10 +104,16 @@ def generic_request(client: openai.OpenAI, messages: list[dict], model:str="defa
69104
70105 params = {** base_params , ** tool_params }
71106
72- return client .chat .completions .create (** params )
107+ return client .chat .completions .create (
108+ ** params ,
109+ extra_headers = {
110+ "HTTP-Referer" : "https://zanomega.com/" ,
111+ "X-Title" : "Zanomega/open-taranis"
112+ }
113+ )
73114
74115 @staticmethod
75- def openrouter_request (client : openai .OpenAI , messages : list [dict ], model :str = "defaut" , temperature :float = 0.7 , max_tokens :int = 4096 , tools :list [dict ]= None , ** kwargs ) -> openai .Stream :
116+ def generic_request (client : openai .OpenAI , messages : list [dict ], model :str = "defaut" , temperature :float = 0.7 , max_tokens :int = 4096 , tools :list [dict ]= None , ** kwargs ) -> openai .Stream :
76117 base_params = {
77118 "model" : model ,
78119 "messages" : messages ,
@@ -91,13 +132,11 @@ def openrouter_request(client: openai.OpenAI, messages: list[dict], model:str="d
91132
92133 params = {** base_params , ** tool_params }
93134
94- return client .chat .completions .create (
95- ** params ,
96- extra_headers = {
97- "HTTP-Referer" : "https://zanomega.com/" ,
98- "X-Title" : "Zanomega/open-taranis"
99- }
100- )
135+ return client .chat .completions .create (** params )
136+

# ==============================
# Functions for handling streamed responses
# ==============================
101140
102141def handle_streaming (stream : openai .Stream ):
103142 """
@@ -199,7 +238,11 @@ def handle_tool_call(tool_call:dict) -> tuple[str, str, dict, str] :
199238
200239 return fid , fname , args , ""
201240
202- def create_assistant_response (content :str , tool_calls :list [dict ]= None ) -> dict :
# ==============================
# Helpers for building role-tagged message dicts
# ==============================

245+ def create_assistant_response (content :str , tool_calls :list [dict ]= None ) -> dict [str , str ]:
203246 """
204247 Creates an assistant message, optionally with tool calls.
205248
0 commit comments