"""
77import json
8- import random
9- from typing import Optional , Union
8+ from typing import Union
109
1110import openai
1211from openai .types .chat import (
12+ ChatCompletion ,
1313 ChatCompletionAssistantMessageParam ,
1414 ChatCompletionFunctionToolParam ,
1515 ChatCompletionMessage ,
2020)
2121
2222from app import settings
23+ from app .logging_config import get_logger , setup_logging
2324from app .stackademy import stackademy_app
2425
2526
# Initialize logging once at import time; `logger` is the module-level logger
# used by every function in this file (see process_tool_calls / completion).
setup_logging()
logger = get_logger(__name__)
30+
31+
2632def tool_factory_get_courses () -> ChatCompletionFunctionToolParam :
2733 """Factory function to create a tool for getting courses"""
2834 return ChatCompletionFunctionToolParam (
@@ -50,6 +56,36 @@ def tool_factory_get_courses() -> ChatCompletionFunctionToolParam:
5056 )
5157
5258
def tool_factory_register() -> ChatCompletionFunctionToolParam:
    """Build the OpenAI tool definition for registering a student in a course.

    Returns:
        A ``ChatCompletionFunctionToolParam`` describing the
        ``register_course`` function and its JSON-schema parameters.
    """
    # JSON schema for the function arguments; all three fields are mandatory
    # and no extra keys are accepted.
    parameters = {
        "type": "object",
        "required": ["course_code", "email", "full_name"],
        "properties": {
            "course_code": {
                "type": "string",
                "description": "The unique code for the course.",
            },
            "email": {
                "type": "string",
                "description": "The email address of the new user.",
            },
            "full_name": {
                "type": "string",
                "description": "The full name of the new user.",
            },
        },
        "additionalProperties": False,
    }
    return ChatCompletionFunctionToolParam(
        type="function",
        function={
            "name": "register_course",
            "description": "Register a student in a course with the provided details.",
            "parameters": parameters,
        },
    )
88+
5389messages : list [
5490 Union [
5591 ChatCompletionSystemMessageParam ,
@@ -61,6 +97,7 @@ def tool_factory_get_courses() -> ChatCompletionFunctionToolParam:
6197 ChatCompletionSystemMessageParam (
6298 role = "system" ,
6399 content = """You are a helpful assistant for the Stackademy online learning platform.
100+ If the user wants no further assistance, respond with "Goodbye!".
64101 Prioritize use of the functions available to you as needed.
65102 Do not provide answers that are not based on the functions available to you.
66103 Your task is to assist users with their queries related to the platform,
@@ -74,9 +111,6 @@ def tool_factory_get_courses() -> ChatCompletionFunctionToolParam:
74111 ),
75112]
76113
77- # Define tools separately
78- tools = [tool_factory_get_courses ()]
79-
80114
81115def handle_function_call (function_name : str , arguments : dict ) -> str :
82116 """Handle function calls from the OpenAI API."""
@@ -91,11 +125,67 @@ def handle_function_call(function_name: str, arguments: dict) -> str:
91125 # Return as JSON string
92126 return json .dumps (courses , default = str , indent = 2 )
93127
128+ if function_name == "register_course" :
129+ course_code = arguments .get ("course_code" , "MISSING COURSE CODE" )
130+ email = arguments .get ("email" , "MISSING EMAIL" )
131+ full_name = arguments .get ("full_name" , "MISSING NAME" )
132+
133+ # Call the actual function
134+ success = stackademy_app .register_course (course_code = course_code , email = email , full_name = full_name )
135+
136+ # Return result as JSON string
137+ return json .dumps ({"success" : success })
138+
94139 return json .dumps ({"error" : f"Unknown function: { function_name } " })
95140
96141
def process_tool_calls(message: ChatCompletionMessage) -> list[str]:
    """Execute every function tool call carried by *message*.

    For each function-type tool call this appends an assistant message echoing
    the call to the module-level ``messages`` list, runs the matching handler
    via ``handle_function_call``, and appends the handler's output as a tool
    message so the next API round-trip sees a valid assistant/tool pairing.

    Args:
        message: The assistant message returned by the chat completions API.

    Returns:
        Names of the functions that were invoked; empty when *message* is not
        a ``ChatCompletionMessage`` or carries no tool calls.
    """
    invoked: list[str] = []
    if not isinstance(message, ChatCompletionMessage) or not message.tool_calls:
        return invoked

    for call in message.tool_calls:
        # Only function-type tool calls are actionable here.
        if call.type != "function":
            continue

        name = call.function.name
        args = json.loads(call.function.arguments)
        invoked.append(name)

        # Echo the call back as an assistant message; the arguments stay a raw
        # JSON string — the API expects them unparsed.
        echo = ChatCompletionMessageFunctionToolCallParam(
            id=call.id,
            type="function",
            function={"name": name, "arguments": call.function.arguments},
        )
        messages.append(
            ChatCompletionAssistantMessageParam(
                role="assistant",
                content=message.content if message.content else "Accessing tool...",
                tool_calls=[echo],
            )
        )
        logger.info("Function call detected: %s with args %s", name, args)

        # Run the handler and feed its result back into the conversation.
        messages.append(
            ChatCompletionToolMessageParam(
                role="tool",
                content=handle_function_call(name, args),
                tool_call_id=call.id,
            )
        )

    logger.debug(
        "Updated messages: %s",
        [msg.model_dump() if not isinstance(msg, dict) else msg for msg in messages],
    )
    return invoked
186+
97187# pylint: disable=too-many-locals
98- def completion (prompt : str ):
188+ def completion (prompt : str ) -> tuple [ ChatCompletion , list [ str ]] :
99189 """LLM text completion"""
100190
101191 # Set the OpenAI API key
@@ -108,78 +198,36 @@ def completion(prompt: str):
108198 temperature = settings .OPENAI_API_TEMPERATURE
109199 max_tokens = settings .OPENAI_API_MAX_TOKENS
110200 messages .append (ChatCompletionUserMessageParam (role = "user" , content = prompt ))
201+ functions_called = []
111202
112203 # Call the OpenAI API
113204 # -------------------------------------------------------------------------
114205 response = openai .chat .completions .create (
115206 model = model ,
116207 messages = messages ,
117208 tool_choice = {"type" : "function" , "function" : {"name" : "get_courses" }},
118- tools = tools ,
209+ tools = [ tool_factory_get_courses ()] ,
119210 temperature = temperature ,
120211 max_tokens = max_tokens ,
121212 )
122- print ( response .model_dump ())
213+ logger . debug ( "Initial response: %s" , response .model_dump ())
123214
124215 # Check if the model wants to call a function
125216 # -------------------------------------------------------------------------
126217 message = response .choices [0 ].message
127218
128- if message .tool_calls :
129-
130- # Process each tool call
131- for tool_call in message .tool_calls :
132-
133- # For function calls, access via type checking
134- if tool_call .type == "function" :
135- function_name = tool_call .function .name
136- function_args = json .loads (tool_call .function .arguments )
137- tool_calls_param = [
138- ChatCompletionMessageFunctionToolCallParam (
139- id = tool_call .id ,
140- type = "function" ,
141- function = {
142- "name" : function_name ,
143- "arguments" : tool_call .function .arguments , # Keep as string, don't parse
144- },
145- )
146- ]
147- assistant_content = message .content if message .content else "Accessing tool..."
148- messages .append (
149- ChatCompletionAssistantMessageParam (
150- role = "assistant" , content = assistant_content , tool_calls = tool_calls_param
151- )
152- )
153- print (f"Function call detected: { function_name } with args { function_args } " )
154-
155- # Execute the function
156- function_result = handle_function_call (function_name , function_args )
157-
158- # Add the function result to the conversation
159- tool_message = ChatCompletionToolMessageParam (
160- role = "tool" , content = function_result , tool_call_id = tool_call .id
161- )
162- messages .append (tool_message )
163-
164- print (f"Updated messages: { [msg .model_dump () if not isinstance (msg , dict ) else msg for msg in messages ]} " )
219+ while message .tool_calls :
220+ functions_called = process_tool_calls (message )
165221
166222 # Make another API call to get the final response
167- final_response = openai .chat .completions .create (
223+ response = openai .chat .completions .create (
168224 model = model ,
169225 messages = messages ,
170- tools = tools ,
226+ tools = [ tool_factory_get_courses (), tool_factory_register ()] ,
171227 temperature = temperature ,
172228 max_tokens = max_tokens ,
173229 )
230+ message = response .choices [0 ].message
231+ logger .debug ("Updated response: %s" , response .model_dump ())
174232
175- final_message = final_response .choices [0 ].message
176- retval = final_message .content
177- print (final_response .model_dump ())
178- else :
179- # No function call, just return the content
180- retval = message .content
181-
182- # Print the response
183- # -------------------------------------------------------------------------
184- print (retval )
185- return retval
233+ return response , functions_called