Skip to content

Commit 9e88bce

Browse files
authored
feat: add client.chat_stream helper (#16)
1 parent ec9fb79 commit 9e88bce

3 files changed

Lines changed: 14 additions & 14 deletions

File tree

listeners/assistant/assistant.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -118,23 +118,23 @@ def respond_in_assistant_thread(
118118

119119
returned_message = call_llm(messages_in_thread)
120120

121-
stream_response = client.chat_startStream(
121+
streamer = client.chat_stream(
122122
channel=channel_id,
123123
recipient_team_id=team_id,
124124
recipient_user_id=user_id,
125125
thread_ts=thread_ts,
126126
)
127-
stream_ts = stream_response["ts"]
128127

129-
# use of this for loop is specific to openai response method
128+
# Loop over OpenAI response stream
129+
# https://platform.openai.com/docs/api-reference/responses/create
130130
for event in returned_message:
131131
if event.type == "response.output_text.delta":
132-
client.chat_appendStream(channel=channel_id, ts=stream_ts, markdown_text=f"{event.delta}")
132+
streamer.append(markdown_text=f"{event.delta}")
133133
else:
134134
continue
135135

136136
feedback_block = create_feedback_block()
137-
client.chat_stopStream(channel=channel_id, ts=stream_ts, blocks=feedback_block)
137+
streamer.stop(blocks=feedback_block)
138138

139139
except Exception as e:
140140
logger.exception(f"Failed to handle a user message event: {e}")

listeners/events/app_mentioned.py

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -34,23 +34,23 @@ def app_mentioned_callback(client: WebClient, event: dict, logger: Logger, say:
3434

3535
returned_message = call_llm([{"role": "user", "content": text}])
3636

37-
stream_response = client.chat_startStream(
38-
channel=channel_id, recipient_team_id=team_id, recipient_user_id=user_id, thread_ts=thread_ts
37+
streamer = client.chat_stream(
38+
channel=channel_id,
39+
recipient_team_id=team_id,
40+
recipient_user_id=user_id,
41+
thread_ts=thread_ts,
3942
)
4043

41-
stream_ts = stream_response["ts"]
42-
4344
# Loop over OpenAI response stream
4445
# https://platform.openai.com/docs/api-reference/responses/create
4546
for event in returned_message:
4647
if event.type == "response.output_text.delta":
47-
client.chat_appendStream(channel=channel_id, ts=stream_ts, markdown_text=f"{event.delta}")
48+
streamer.append(markdown_text=f"{event.delta}")
4849
else:
4950
continue
5051

5152
feedback_block = create_feedback_block()
52-
client.chat_stopStream(channel=channel_id, ts=stream_ts, blocks=feedback_block)
53-
53+
streamer.stop(blocks=feedback_block)
5454
except Exception as e:
5555
logger.exception(f"Failed to handle a user message event: {e}")
5656
say(f":warning: Something went wrong! ({e})")

requirements.txt

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
1-
slack-sdk==3.36.0.dev5
2-
slack-bolt==1.26.0.dev2
1+
slack-sdk==3.36.0.dev6
2+
slack-bolt==1.26.0.dev3
33

44
# If you use a different LLM vendor, replace this dependency
55
openai

0 commit comments

Comments (0)