Skip to content

Commit 0780aa3

Browse files
committed
feat: streaming helper
1 parent ec9fb79 commit 0780aa3

2 files changed

Lines changed: 4 additions & 5 deletions

File tree

listeners/assistant/assistant.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -118,23 +118,22 @@ def respond_in_assistant_thread(
118118

119119
returned_message = call_llm(messages_in_thread)
120120

121-
stream_response = client.chat_startStream(
121+
streamer = client.chat_stream(
122122
channel=channel_id,
123123
recipient_team_id=team_id,
124124
recipient_user_id=user_id,
125125
thread_ts=thread_ts,
126126
)
127-
stream_ts = stream_response["ts"]
128127

129128
# this for-loop structure is specific to the OpenAI Responses API streaming output
130129
for event in returned_message:
131130
if event.type == "response.output_text.delta":
132-
client.chat_appendStream(channel=channel_id, ts=stream_ts, markdown_text=f"{event.delta}")
131+
streamer.append(markdown_text=f"{event.delta}")
133132
else:
134133
continue
135134

136135
feedback_block = create_feedback_block()
137-
client.chat_stopStream(channel=channel_id, ts=stream_ts, blocks=feedback_block)
136+
streamer.stop(blocks=feedback_block)
138137

139138
except Exception as e:
140139
logger.exception(f"Failed to handle a user message event: {e}")

requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
slack-sdk==3.36.0.dev5
2-
slack-bolt==1.26.0.dev2
2+
slack-bolt==1.26.0.dev3
33

44
# If you use a different LLM vendor, replace this dependency
55
openai

0 commit comments

Comments (0)