|
2 | 2 | import json |
3 | 3 | import re |
4 | 4 |
|
5 | | -__version__ = "0.1.2" |
| 5 | +# For the python function to JSON/dict |
| 6 | +import inspect |
| 7 | +from typing import Any, Callable, Literal, Union, get_args, get_origin |
| 8 | + |
| 9 | + |
| 10 | +__version__ = "0.1.3" |
6 | 11 |
|
7 | 12 | import requests |
8 | 13 | from packaging import version |
|
17 | 22 | except Exception: |
18 | 23 | pass |
19 | 24 |
|
| 25 | +# ============================== |
| 26 | +# Function-calling helpers: convert Python functions to OpenAI tool schemas
| 27 | +# ============================== |
| 28 | + |
| 29 | +class utils: |
| 30 | + def _parse_simple_docstring(doc: str | None) -> dict[str, Any]: |
| 31 | + """Parse docstring minimal (description + args).""" |
| 32 | + result = {"description": "", "args": {}} |
| 33 | + if not doc: |
| 34 | + return result |
| 35 | + |
| 36 | + # Extract main description (first paragraph) |
| 37 | + parts = inspect.cleandoc(doc).split('\n\n', 1) |
| 38 | + result["description"] = parts[0].strip() |
| 39 | + |
| 40 | + # Simple args parsing (Google/NumPy style) |
| 41 | + if len(parts) > 1: |
| 42 | + args_section = parts[1].split('Args:')[-1].split('Returns:')[0].split('Raises:')[0] |
| 43 | + lines = [l.strip() for l in args_section.split('\n') if l.strip()] |
| 44 | + |
| 45 | + for line in lines: |
| 46 | + if ':' in line and not line.startswith(' '): |
| 47 | + # Format: "arg_name: description" or "arg_name (type): description" |
| 48 | + arg_match = re.match(r'(\w+)\s*(?:\([^)]*\))?\s*:\s*(.+)', line) |
| 49 | + if arg_match: |
| 50 | + arg_name, desc = arg_match.groups() |
| 51 | + result["args"][arg_name] = desc.strip() |
| 52 | + |
| 53 | + return result |
| 54 | + |
| 55 | + def _python_type_to_schema(py_type: Any) -> dict[str, Any]: |
| 56 | + """Convert Python type to JSON Schema - MINIMAL version.""" |
| 57 | + origin = get_origin(py_type) |
| 58 | + args = get_args(py_type) |
| 59 | + |
| 60 | + # Optional: Union[X, None] |
| 61 | + if origin is Union and type(None) in args: |
| 62 | + non_none = [a for a in args if a is not type(None)] |
| 63 | + if len(non_none) == 1: |
| 64 | + schema = utils._python_type_to_schema(non_none[0]) |
| 65 | + schema["nullable"] = True |
| 66 | + return schema |
| 67 | + |
| 68 | + # Literal for enums |
| 69 | + if origin is Literal: |
| 70 | + return {"type": "string", "enum": list(args)} |
| 71 | + |
| 72 | + # Basic types |
| 73 | + if py_type in (str, int, float, bool, type(None)): |
| 74 | + type_map = {str: "string", int: "integer", float: "number", bool: "boolean", type(None): "null"} |
| 75 | + return {"type": type_map[py_type]} |
| 76 | + |
| 77 | + # Collections |
| 78 | + if origin in (list,): |
| 79 | + item_schema = {"type": "string"} # Default |
| 80 | + if args: |
| 81 | + item_schema = utils._python_type_to_schema(args[0]) |
| 82 | + return {"type": "array", "items": item_schema} |
| 83 | + |
| 84 | + if origin in (dict,): |
| 85 | + return {"type": "object"} |
| 86 | + |
| 87 | + # Default fallback |
| 88 | + return {"type": "string"} |
| 89 | + |
| 90 | + def function_to_openai_tool(func: Callable) -> dict[str, Any]: |
| 91 | + """Convert Python function to OpenAI tool format - MINIMAL.""" |
| 92 | + sig = inspect.signature(func) |
| 93 | + type_hints = func.__annotations__ |
| 94 | + |
| 95 | + # Parse docstring |
| 96 | + doc_info = utils._parse_simple_docstring(func.__doc__ or "") |
| 97 | + |
| 98 | + # Build schema |
| 99 | + properties = {} |
| 100 | + required = [] |
| 101 | + |
| 102 | + for param_name, param in sig.parameters.items(): |
| 103 | + # Get type annotation |
| 104 | + py_type = type_hints.get(param_name, str) |
| 105 | + schema = utils._python_type_to_schema(py_type) |
| 106 | + |
| 107 | + # Add description from docstring |
| 108 | + if param_name in doc_info["args"]: |
| 109 | + schema["description"] = doc_info["args"][param_name] |
| 110 | + |
| 111 | + # Handle defaults |
| 112 | + if param.default is not inspect.Parameter.empty: |
| 113 | + schema["default"] = param.default |
| 114 | + if param.default is None: |
| 115 | + schema["nullable"] = True |
| 116 | + else: |
| 117 | + required.append(param_name) |
| 118 | + |
| 119 | + properties[param_name] = schema |
| 120 | + |
| 121 | + return { |
| 122 | + "type": "function", |
| 123 | + "function": { |
| 124 | + "name": func.__name__, |
| 125 | + "description": doc_info["description"], |
| 126 | + "parameters": { |
| 127 | + "type": "object", |
| 128 | + "properties": properties, |
| 129 | + "required": required, |
| 130 | + "additionalProperties": False |
| 131 | + } |
| 132 | + } |
| 133 | + } |
| 134 | + |
| 135 | +# Utility for multiple functions, code by Kimi k2 thinking |
def functions_to_tools(funcs: list[Callable]) -> list[dict[str, Any]]:
    """Convert several Python functions to OpenAI tool dicts in one call."""
    return list(map(utils.function_to_openai_tool, funcs))
| 138 | + |
| 139 | + |
20 | 140 | class clients: |
21 | 141 |
|
22 | 142 | # ============================== |
@@ -72,8 +192,15 @@ def openrouter(api_key: str) -> openai.OpenAI: |
72 | 192 | """ |
73 | 193 | return openai.OpenAI(api_key=api_key, base_url="https://openrouter.ai/api/v1") |
74 | 194 |
|
| 195 | + @staticmethod |
| 196 | + def ollama() -> openai.OpenAI: |
| 197 | + """ |
| 198 | + Use `clients.generic_request` for call |
| 199 | + """ |
| 200 | + return openai.OpenAI(api_key="", base_url="http://localhost:11434/v1") |
| 201 | + |
75 | 202 | # ============================== |
76 | | -# Customers for calls with their specifications |
| 203 | +# Custom request helpers for calls with their specifications
77 | 204 | # |
78 | 205 | # Like "include_venice_system_prompt" for venice.ai or custom app for openrouter |
79 | 206 | # ============================== |
@@ -167,158 +294,6 @@ def openrouter_request(client: openai.OpenAI, messages: list[dict], model:str="n |
167 | 294 | } |
168 | 295 | ) |
169 | 296 |
|
170 | | -# ============================== |
171 | | -# ollama, experimental and non-mandatory installation |
172 | | -# ============================== |
173 | | - |
174 | | -try: |
175 | | - import ollama # Code by Grok 4.1 on Venice.ai |
176 | | - |
177 | | - @staticmethod |
178 | | - def ollama_request(messages: list[dict], model: str="Qwen3:4b", temperature: float=0.4, max_tokens: int=4096, tools: list[dict]=None, **kwargs): |
179 | | - """ |
180 | | - Streaming requests with Ollama (local). |
181 | | - Returns OpenAI-compatible ChatCompletionChunk stream for handle_streaming. |
182 | | - Supports tool calls. |
183 | | - """ |
184 | | - import time |
185 | | - import json |
186 | | - |
187 | | - # OpenAI-compatible classes for handle_streaming |
188 | | - class Function: |
189 | | - def __init__(self, name="", arguments=""): |
190 | | - self.name = name |
191 | | - self.arguments = arguments |
192 | | - |
193 | | - class ToolCall: |
194 | | - def __init__(self, index=0, id="", type="function", function=None): |
195 | | - self.index = index |
196 | | - self.id = id |
197 | | - self.type = type |
198 | | - self.function = function or Function() |
199 | | - |
200 | | - class Delta: |
201 | | - def __init__(self, content="", tool_calls=None, finish_reason=None): |
202 | | - self.content = content |
203 | | - self.tool_calls = tool_calls |
204 | | - self.finish_reason = finish_reason |
205 | | - |
206 | | - class Choice: |
207 | | - def __init__(self, index=0, delta=None, finish_reason=None): |
208 | | - self.index = index |
209 | | - self.delta = delta or Delta() |
210 | | - self.finish_reason = finish_reason |
211 | | - |
212 | | - class Chunk: |
213 | | - def __init__(self, id="", object="chat.completion.chunk", model="", choices=None, created=0): |
214 | | - self.id = id |
215 | | - self.object = object |
216 | | - self.model = model |
217 | | - self.choices = choices or [] |
218 | | - self.created = created |
219 | | - |
220 | | - # Transform messages from OpenAI format to Ollama format |
221 | | - ollama_messages = [] |
222 | | - for msg in messages: |
223 | | - new_msg = dict(msg) |
224 | | - |
225 | | - # Transform tool_calls in assistant messages (OpenAI → Ollama) |
226 | | - if "tool_calls" in new_msg and new_msg["tool_calls"]: |
227 | | - transformed_tool_calls = [] |
228 | | - for tc in new_msg["tool_calls"]: |
229 | | - args = tc.get("function", {}).get("arguments", "{}") |
230 | | - # Ollama wants dict, OpenAI sends string JSON |
231 | | - if isinstance(args, str): |
232 | | - try: |
233 | | - args = json.loads(args) |
234 | | - except json.JSONDecodeError: |
235 | | - args = {} |
236 | | - # Ollama doesn't want id/type, only function |
237 | | - transformed_tool_calls.append({ |
238 | | - "function": { |
239 | | - "name": tc.get("function", {}).get("name", ""), |
240 | | - "arguments": args |
241 | | - } |
242 | | - }) |
243 | | - new_msg["tool_calls"] = transformed_tool_calls |
244 | | - |
245 | | - ollama_messages.append(new_msg) |
246 | | - |
247 | | - # Map parameters to Ollama options format |
248 | | - options = { |
249 | | - "temperature": temperature, |
250 | | - "num_predict": max_tokens, |
251 | | - } |
252 | | - if "max_completion_tokens" in kwargs: |
253 | | - options["num_predict"] = kwargs["max_completion_tokens"] |
254 | | - # Add other options from kwargs (exclude non-ollama params) |
255 | | - for key, value in kwargs.items(): |
256 | | - if key not in ["tool_choice", "stream", "max_completion_tokens", "client"]: |
257 | | - options[key] = value |
258 | | - |
259 | | - params = { |
260 | | - "model": model, |
261 | | - "messages": ollama_messages, |
262 | | - "options": options, |
263 | | - "stream": True, |
264 | | - } |
265 | | - if tools: |
266 | | - params["tools"] = tools |
267 | | - |
268 | | - ollama_stream = ollama.chat(**params) |
269 | | - |
270 | | - base_time = int(time.time()) |
271 | | - chunk_id = f"chatcmpl-ollama-{base_time}" |
272 | | - generated_ids = {} |
273 | | - |
274 | | - for ollama_chunk in ollama_stream: |
275 | | - message = ollama_chunk.get("message", {}) |
276 | | - content = message.get("content", "") |
277 | | - tool_calls_raw = message.get("tool_calls") or [] |
278 | | - |
279 | | - # Transform tool_calls: Ollama dict → OpenAI object format |
280 | | - tool_calls_transformed = None |
281 | | - if tool_calls_raw: |
282 | | - tool_calls_transformed = [] |
283 | | - for i, tc in enumerate(tool_calls_raw): |
284 | | - if i not in generated_ids: |
285 | | - generated_ids[i] = f"call_{i}_{base_time}" |
286 | | - |
287 | | - func_data = tc.get("function", {}) |
288 | | - |
289 | | - # Handle arguments: Ollama returns dict, OpenAI expects JSON string |
290 | | - args = func_data.get("arguments", "") |
291 | | - if isinstance(args, dict): |
292 | | - args = json.dumps(args) |
293 | | - |
294 | | - function_obj = Function( |
295 | | - name=func_data.get("name", ""), |
296 | | - arguments=args |
297 | | - ) |
298 | | - tool_call_obj = ToolCall( |
299 | | - index=i, |
300 | | - id=generated_ids[i], |
301 | | - type="function", |
302 | | - function=function_obj |
303 | | - ) |
304 | | - tool_calls_transformed.append(tool_call_obj) |
305 | | - |
306 | | - # Final chunk |
307 | | - if ollama_chunk.get("done", False): |
308 | | - delta = Delta(content="", tool_calls=None, finish_reason="stop") |
309 | | - choice = Choice(0, delta, "stop") |
310 | | - yield Chunk(chunk_id, model=model, choices=[choice], created=base_time) |
311 | | - break |
312 | | - |
313 | | - # Yield content or tool_calls |
314 | | - if content or tool_calls_transformed: |
315 | | - delta = Delta(content=content, tool_calls=tool_calls_transformed) |
316 | | - choice = Choice(0, delta) |
317 | | - yield Chunk(chunk_id, model=model, choices=[choice], created=base_time) |
318 | | - |
319 | | -except: |
320 | | - pass |
321 | | - |
322 | 297 | # ============================== |
323 | 298 | # Functions for the streaming |
324 | 299 | # ============================== |
|
0 commit comments