Skip to content

Commit 0ff44b4

Browse files
committed
fix: fix log persistence issues with concurrent question requests
#940
1 parent ff60e1f commit 0ff44b4

File tree

1 file changed

+11
-8
lines changed

1 file changed

+11
-8
lines changed

backend/apps/chat/task/llm.py

Lines changed: 11 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -70,21 +70,19 @@ class LLMService:
7070
record: ChatRecord
7171
config: LLMConfig
7272
llm: BaseChatModel
73-
sql_message: List[Union[BaseMessage, dict[str, Any]]] = []
74-
chart_message: List[Union[BaseMessage, dict[str, Any]]] = []
73+
sql_message: List[Union[BaseMessage, dict[str, Any]]]
74+
chart_message: List[Union[BaseMessage, dict[str, Any]]]
7575

7676
# session: Session = db_session
7777
current_user: CurrentUser
7878
current_assistant: Optional[CurrentAssistant] = None
7979
out_ds_instance: Optional[AssistantOutDs] = None
8080
change_title: bool = False
8181

82-
generate_sql_logs: List[ChatLog] = []
83-
generate_chart_logs: List[ChatLog] = []
84-
85-
current_logs: dict[OperationEnum, ChatLog] = {}
86-
87-
chunk_list: List[str] = []
82+
generate_sql_logs: List[ChatLog]
83+
generate_chart_logs: List[ChatLog]
84+
current_logs: dict[OperationEnum, ChatLog]
85+
chunk_list: List[str]
8886
future: Future
8987

9088
trans: I18nHelper = None
@@ -98,6 +96,11 @@ class LLMService:
9896
def __init__(self, session: Session, current_user: CurrentUser, chat_question: ChatQuestion,
9997
current_assistant: Optional[CurrentAssistant] = None, no_reasoning: bool = False,
10098
embedding: bool = False, config: LLMConfig = None):
99+
self.sql_message = []
100+
self.chart_message = []
101+
self.generate_sql_logs = []
102+
self.generate_chart_logs = []
103+
self.current_logs = {}
101104
self.chunk_list = []
102105
self.current_user = current_user
103106
self.current_assistant = current_assistant

0 commit comments

Comments (0)