Skip to content

Commit 7c8884d

Browse files
fix: add fallback text for thought extraction
- prevents flicker in UI when escaped output_str is initially empty or if no thought is provided by LLM - italicize "thinking" text Signed-off-by: Patrick Chin <8509935+thepatrickchin@users.noreply.github.com>
1 parent 87b7912 commit 7c8884d

1 file changed

Lines changed: 9 additions & 12 deletions

File tree

packages/nvidia_nat_core/src/nat/front_ends/fastapi/step_adaptor.py

Lines changed: 9 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -73,20 +73,20 @@ def _step_matches_filter(self, step: IntermediateStep, config: StepAdaptorConfig
7373
return False
7474

7575
@staticmethod
76-
def _extract_react_thought(output: str) -> str | None:
76+
def _extract_react_thought(output: str, fallback: str = "_Thinking..._") -> str:
7777
"""Extract ReAct-style 'Thought: ...' from LLM output for thought process display."""
7878
if not output or not isinstance(output, str):
79-
return None
79+
return fallback
8080
# Match "Thought:" (case-insensitive, optional colon) and capture until Action/Final Answer/end
8181
match = re.search(
8282
r"thought\s*:?\s*(.*?)(?=\s*(?:action\s*\d*\s*:|final\s+answer\s*:)|$)",
8383
output,
8484
re.DOTALL | re.IGNORECASE,
8585
)
8686
if not match:
87-
return None
87+
return fallback
8888
text = match.group(1).strip()
89-
return text if text else None
89+
return text if text else fallback
9090

9191
def _handle_llm(self, step: IntermediateStepPayload, ancestry: InvocationNode) -> ResponseSerializable | None:
9292
input_str: str | None = None
@@ -141,14 +141,11 @@ def _handle_llm(self, step: IntermediateStepPayload, ancestry: InvocationNode) -
141141
thought_text = None
142142
if step.event_type == IntermediateStepType.LLM_START:
143143
# Show "Thinking..." placeholder at START
144-
thought_text = "Thinking..."
145-
elif step.event_type == IntermediateStepType.LLM_NEW_TOKEN and output_str:
146-
# Try to extract partial thought from streaming tokens
147-
extracted = self._extract_react_thought(output_str)
148-
thought_text = extracted if extracted else "Thinking..."
149-
elif step.event_type == IntermediateStepType.LLM_END and output_str:
150-
# END will show complete thought
151-
thought_text = self._extract_react_thought(output_str)
144+
thought_text = "_Thinking..._"
145+
elif step.event_type == IntermediateStepType.LLM_NEW_TOKEN:
146+
thought_text = self._extract_react_thought(output_str or "", fallback="_Thinking..._")
147+
elif step.event_type == IntermediateStepType.LLM_END:
148+
thought_text = self._extract_react_thought(output_str or "", fallback="Completed thought.")
152149

153150
event = ResponseIntermediateStep(id=step.UUID,
154151
name=step.name or "",

0 commit comments

Comments (0)