Skip to content

Commit 807ad83

Browse files
committed
feat: in-session conversation memory for AI chat
Passes the recent conversation history (capped at the last 20 messages) on each request so the AI remembers earlier messages within the same session.
2 parents ca993f0 + 91229fc commit 807ad83

File tree

3 files changed

+42
-9
lines changed

3 files changed

+42
-9
lines changed

backend/app.py

Lines changed: 22 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,7 @@ def chat():
114114
try:
115115
data = request.get_json()
116116
message = data.get('message', '')
117+
history = data.get('history', [])[-20:] # cap at 20 messages
117118

118119
if not message:
119120
return jsonify({'error': 'Message is required'}), 400
@@ -211,12 +212,18 @@ def chat():
211212
- Always respond in the same language as the user's question and respond as if you are {personal_info['name']}.
212213
"""
213214

215+
# Build messages with conversation history
216+
openai_messages = [{"role": "system", "content": final_answer_prompt}]
217+
for h in history:
218+
role = h.get('role', 'user')
219+
if role in ('user', 'assistant'):
220+
openai_messages.append({"role": role, "content": h.get('content', '')})
221+
openai_messages.append({"role": "user", "content": message})
222+
214223
# Call OpenAI API for the final answer
215224
final_response = client.chat.completions.create(
216225
model="gpt-4o-mini",
217-
messages=[
218-
{"role": "system", "content": final_answer_prompt}
219-
],
226+
messages=openai_messages,
220227
temperature=0.5,
221228
max_tokens=1000,
222229
)
@@ -249,6 +256,7 @@ def chat_stream():
249256
try:
250257
data = request.get_json()
251258
message = data.get('message', '')
259+
history = data.get('history', [])[-20:] # cap at 20 messages
252260

253261
if not message:
254262
return jsonify({'error': 'Message is required'}), 400
@@ -319,16 +327,25 @@ def chat_stream():
319327
- Always respond in the same language as the user's question and respond as if you are {personal_info['name']}.
320328
"""
321329

322-
# Capture user_id in closure before streaming
330+
# Build messages with conversation history
331+
stream_messages = [{"role": "system", "content": final_answer_prompt}]
332+
for h in history:
333+
role = h.get('role', 'user')
334+
if role in ('user', 'assistant'):
335+
stream_messages.append({"role": role, "content": h.get('content', '')})
336+
stream_messages.append({"role": "user", "content": message})
337+
338+
# Capture variables in closure before streaming
323339
captured_user_id = user_id
324340
captured_message = message
341+
captured_stream_messages = stream_messages
325342

326343
def generate():
327344
full_response = []
328345
try:
329346
stream = client.chat.completions.create(
330347
model="gpt-4o-mini",
331-
messages=[{"role": "system", "content": final_answer_prompt}],
348+
messages=captured_stream_messages,
332349
temperature=0.5,
333350
max_tokens=1000,
334351
stream=True,

src/components/ChatSection.tsx

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import { useState, useEffect, useRef } from "react";
22
import { AlertCircle, Download } from "lucide-react";
33
import MessageBubble from "./MessageBubble";
4-
import { sendMessageStream, checkHealth } from "@/utils/api";
4+
import { sendMessageStream, checkHealth, type ConversationMessage } from "@/utils/api";
55
import { toast } from "sonner";
66
import type { Message } from "@/types/chat";
77
import { useLang } from "@/i18n/LanguageContext";
@@ -152,6 +152,15 @@ const ChatSection = () => {
152152
timestamp: new Date(),
153153
};
154154

155+
// Build conversation history for the backend (exclude initial greeting, cap at 20 messages)
156+
const history: ConversationMessage[] = messages
157+
.filter((m) => m.id !== INITIAL_MSG_ID)
158+
.slice(-20)
159+
.map((m) => ({
160+
role: m.isUser ? 'user' : 'assistant',
161+
content: m.text,
162+
}));
163+
155164
setMessages((prev) => [...prev, userMessage]);
156165
setInputValue("");
157166
setIsLoading(true);
@@ -161,6 +170,7 @@ const ChatSection = () => {
161170

162171
await sendMessageStream(
163172
messageText,
173+
history,
164174
// onChunk: first chunk creates the message, subsequent ones append
165175
(chunk) => {
166176
setIsLoading(false);

src/utils/api.ts

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,19 @@ export interface ChatResponse {
77
error?: string;
88
}
99

10-
export async function sendMessage(message: string): Promise<ChatResponse> {
10+
export interface ConversationMessage {
11+
role: 'user' | 'assistant';
12+
content: string;
13+
}
14+
15+
export async function sendMessage(message: string, history: ConversationMessage[] = []): Promise<ChatResponse> {
1116
try {
1217
const response = await fetch(`${API_BASE_URL}/api/chat`, {
1318
method: 'POST',
1419
headers: {
1520
'Content-Type': 'application/json',
1621
},
17-
body: JSON.stringify({ message }),
22+
body: JSON.stringify({ message, history }),
1823
});
1924

2025
if (!response.ok) {
@@ -35,6 +40,7 @@ export async function sendMessage(message: string): Promise<ChatResponse> {
3540

3641
export async function sendMessageStream(
3742
message: string,
43+
history: ConversationMessage[],
3844
onChunk: (chunk: string) => void,
3945
onDone: () => void,
4046
onError: (error: string) => void
@@ -43,7 +49,7 @@ export async function sendMessageStream(
4349
const response = await fetch(`${API_BASE_URL}/api/chat/stream`, {
4450
method: 'POST',
4551
headers: { 'Content-Type': 'application/json' },
46-
body: JSON.stringify({ message }),
52+
body: JSON.stringify({ message, history }),
4753
});
4854

4955
if (!response.ok) {

0 commit comments

Comments (0)