Skip to content

Commit 9b9b051

Browse files
committed
feat: update app_mentioned_callback so it no longer requires the conversations.history API
1 parent b41054a commit 9b9b051

2 files changed

Lines changed: 18 additions & 21 deletions

File tree

listeners/assistant/assistant.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ def respond_in_assistant_thread(
7373
user_message = payload["text"]
7474

7575
set_status(
76-
status="Drafting...",
76+
status="thinking...",
7777
loading_messages=[
7878
"Teaching the hamsters to type faster…",
7979
"Untangling the internet cables…",

listeners/events/app_mentioned.py

Lines changed: 17 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,9 @@
11
from logging import Logger
22
from slack_sdk import WebClient
33
from slack_bolt import Say
4-
from typing import List, Dict
54

65
from ..llm_caller import call_llm
76
from ..views.feedback_block import create_feedback_block
8-
from ..listeners_constants import loading_messages
97

108
"""
119
Handles the event when the app is mentioned in a Slack channel, retrieves the conversation context,
@@ -15,36 +13,35 @@
1513

1614
def app_mentioned_callback(client: WebClient, event: dict, logger: Logger, say: Say):
1715
try:
18-
1916
channel_id = event.get("channel")
20-
thread_ts = event.get("thread_ts")
21-
user_id = event.get("user")
2217
team_id = event.get("team")
2318
text = event.get("text")
24-
25-
if thread_ts:
26-
conversation_context = client.conversations_replies(channel=channel_id, ts=thread_ts, limit=10)
27-
else:
28-
conversation_context = client.conversations_history(channel=channel_id, limit=50)
29-
thread_ts = event["ts"]
30-
31-
messages_in_thread: List[Dict[str, str]] = []
32-
for message in conversation_context["messages"]:
33-
role = "user" if message.get("bot_id") is None else "assistant"
34-
messages_in_thread.append({"role": role, "content": message["text"]})
35-
if text:
36-
returned_message = call_llm(messages_in_thread)
19+
thread_ts = event.get("thread_ts") or event.get("ts")
20+
user_id = event.get("user")
3721

3822
client.assistant_threads_setStatus(
39-
channel_id=channel_id, thread_ts=thread_ts, status="Bolt is typing", loading_messages=loading_messages
23+
channel_id=channel_id,
24+
thread_ts=thread_ts,
25+
status="thinking...",
26+
loading_messages=[
27+
"Teaching the hamsters to type faster…",
28+
"Untangling the internet cables…",
29+
"Consulting the office goldfish…",
30+
"Polishing up the response just for you…",
31+
"Convincing the AI to stop overthinking…",
32+
],
4033
)
34+
35+
returned_message = call_llm([{"role": "user", "content": text}])
36+
4137
stream_response = client.chat_startStream(
4238
channel=channel_id, recipient_team_id=team_id, recipient_user_id=user_id, thread_ts=thread_ts
4339
)
4440

4541
stream_ts = stream_response["ts"]
4642

47-
# use of this for loop is specific to openai response method
43+
# Loop over OpenAI response stream
44+
# https://platform.openai.com/docs/api-reference/responses/create
4845
for event in returned_message:
4946
if event.type == "response.output_text.delta":
5047
client.chat_appendStream(channel=channel_id, ts=stream_ts, markdown_text=f"{event.delta}")

0 commit comments

Comments
 (0)