11import logging
22from typing import List , Dict
3- from slack_bolt import Assistant , BoltContext , Say , SetSuggestedPrompts , SetStatus
3+ from slack_bolt import Assistant , BoltContext , Say , SetSuggestedPrompts
44from slack_bolt .context .get_thread_context import GetThreadContext
55from slack_sdk import WebClient
6- from slack_sdk .errors import SlackApiError
76
87from ..llm_caller import call_llm
98
def respond_in_assistant_thread(
    payload: dict,
    logger: logging.Logger,
    context: BoltContext,
    client: WebClient,
    say: Say,
):
    """Stream an LLM-generated reply into a Slack assistant thread.

    Rebuilds the conversation from the thread's replies, forwards it to the
    LLM, and streams the model's text deltas back into the thread using the
    chat streaming APIs (chat.startStream / appendStream / stopStream).

    Args:
        payload: Message event payload; ``"channel"`` and ``"thread_ts"``
            identify the assistant thread to stream into.
        logger: Listener logger injected by Bolt.
        context: Request context carrying ``channel_id`` / ``thread_ts``.
        client: Slack WebClient used for history lookup and streaming.
        say: Thread-scoped message utility (kept for interface
            compatibility; the streaming path does not use it).
    """
    try:
        channel_id = payload["channel"]
        thread_ts = payload["thread_ts"]

        # Playful placeholder texts Slack rotates through while the
        # assistant's typing status is shown.
        loading_messages = [
            "Teaching the hamsters to type faster…",
            "Untangling the internet cables…",
            "Consulting the office goldfish…",
            "Polishing up the response just for you…",
            "Convincing the AI to stop overthinking…",
        ]

        # NOTE(review): the ts/oldest/limit arguments below sit inside a diff
        # hunk gap in the reviewed patch; reconstructed from the standard
        # bolt-python assistant sample — confirm against the full file.
        replies = client.conversations_replies(
            channel=context.channel_id,
            ts=context.thread_ts,
            oldest=context.thread_ts,
            limit=10,
        )

        # Rebuild the conversation for the LLM: human turns (no bot_id)
        # are "user", bot turns are "assistant".
        messages_in_thread: List[Dict[str, str]] = []
        for message in replies["messages"]:
            role = "user" if message.get("bot_id") is None else "assistant"
            messages_in_thread.append({"role": role, "content": message["text"]})

        # Show the typing indicator BEFORE issuing the (potentially slow)
        # LLM call — the original set it afterwards, so users saw nothing
        # while the request was in flight.
        client.assistant_threads_setStatus(
            channel_id=channel_id,
            thread_ts=thread_ts,
            status="Bolt is typing",
            loading_messages=loading_messages,
        )
        returned_message = call_llm(messages_in_thread)

        stream_response = client.chat_startStream(
            channel=channel_id,
            thread_ts=thread_ts,
        )
        stream_ts = stream_response["ts"]
        try:
            # Event iteration is specific to the OpenAI Responses streaming
            # API: only output-text deltas are appended; everything else
            # (created/completed/etc.) is ignored.
            for event in returned_message:
                logger.debug("stream event: %s", event.type)
                if event.type == "response.output_text.delta":
                    client.chat_appendStream(
                        channel=channel_id,
                        ts=stream_ts,
                        markdown_text=event.delta,
                    )
        finally:
            # Always close the stream — even if appending fails midway —
            # so the message is not left in a perpetual "streaming" state.
            client.chat_stopStream(
                channel=channel_id,
                ts=stream_ts,
            )

    except Exception as e:
        logger.exception(f"Failed to handle a user message event: {e}")
0 commit comments