Skip to content

Commit f52198a

Browse files
fix: implement CodeRabbit suggestions
- Use proper fixture naming and declaration convention - Avoid possessive 's with inanimate objects - Hardcoded name breaks consistency with emit_thought_start and emit_thought_chunk - Assert the END events in the concurrent lifecycle test. - Drop the unused make_intermediate_step fixture from tests - Instantiate the profiler callback per request, or protect state mutations with locks and add cleanup Signed-off-by: Patrick Chin <8509935+thepatrickchin@users.noreply.github.com>
1 parent 1925cb8 commit f52198a

File tree

5 files changed

+51
-29
lines changed

5 files changed

+51
-29
lines changed

docs/source/build-workflows/workflow-configuration.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ The `functions` section contains the tools used in the workflow, in our example
7575

7676
Agent responses in the NeMo Agent Toolkit UI include a thought process display which shows a step-by-step view of what the workflow is doing.
7777

78-
To customize the text shown for a function in the thought process display, set `thought_description` in that function's configuration. Unlike `description`, which provides the tool's help text that the agent uses to decide when and how to call the tool, `thought_description` only affects the label shown in the thought process display. For example:
78+
To customize the text shown for a function in the thought process display, set the `thought_description` attribute. Unlike `description`, which provides help text that the agent uses to decide when and how to call the tool, `thought_description` only affects the message shown in the thought process display UI as well as in observability traces. For example:
7979

8080
```yaml
8181
functions:

packages/nvidia_nat_core/src/nat/builder/thought.py

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ def emit_thought_start(context: "Context", thought_text: str, name: str | None =
8787
return thought_uuid
8888

8989

90-
def emit_thought_chunk(context: "Context", thought_uuid: str, thought_text: str) -> None:
90+
def emit_thought_chunk(context: "Context", thought_uuid: str, thought_text: str, name: str | None = None) -> None:
9191
"""Emit an update to a streaming thought started with emit_thought_start().
9292
9393
This updates the thought text in the UI, useful for showing progress updates.
@@ -96,24 +96,33 @@ def emit_thought_chunk(context: "Context", thought_uuid: str, thought_text: str)
9696
context: The NAT context object (obtain via Context.get())
9797
thought_uuid: The UUID returned from emit_thought_start()
9898
thought_text: The updated thought text to display
99+
name: Optional name for the thought (defaults to "custom_thought", should match emit_thought_start)
99100
"""
101+
thought_name = name or "custom_thought"
102+
100103
context.intermediate_step_manager.push_intermediate_step(
101104
IntermediateStepPayload(UUID=thought_uuid,
102105
event_type=IntermediateStepType.SPAN_CHUNK,
103-
name="custom_thought",
106+
name=thought_name,
104107
data=StreamEventData(chunk=thought_text)))
105108

106109

107-
def emit_thought_end(context: "Context", thought_uuid: str, thought_text: str | None = None) -> None:
110+
def emit_thought_end(context: "Context",
111+
thought_uuid: str,
112+
thought_text: str | None = None,
113+
name: str | None = None) -> None:
108114
"""Complete a streaming thought started with emit_thought_start().
109115
110116
Args:
111117
context: The NAT context object (obtain via Context.get())
112118
thought_uuid: The UUID returned from emit_thought_start()
113119
thought_text: Optional final thought text (if None, keeps the last chunk text)
120+
name: Optional name for the thought (defaults to "custom_thought", should match emit_thought_start)
114121
"""
122+
thought_name = name or "custom_thought"
123+
115124
context.intermediate_step_manager.push_intermediate_step(
116125
IntermediateStepPayload(UUID=thought_uuid,
117126
event_type=IntermediateStepType.SPAN_END,
118-
name="custom_thought",
127+
name=thought_name,
119128
data=StreamEventData(output=thought_text)))

packages/nvidia_nat_core/tests/nat/builder/test_thought.py

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ def __init__(self, name="fn", fid=None, parent_id=None, parent_name=None):
4747

4848

4949
@pytest.fixture(name="ctx_state")
50-
def ctx_state_fixture():
50+
def fixture_ctx_state():
5151
"""Fresh context state for each test."""
5252
s = ContextState()
5353

@@ -59,12 +59,12 @@ def ctx_state_fixture():
5959

6060

6161
@pytest.fixture(name="output_steps")
62-
def output_steps_fixture():
62+
def fixture_output_steps():
6363
return []
6464

6565

6666
@pytest.fixture(name="ctx")
67-
def ctx_fixture(ctx_state: ContextState, output_steps):
67+
def fixture_ctx(ctx_state: ContextState, output_steps):
6868
"""Context with intermediate step manager subscribed to output_steps."""
6969
ctx = Context(ctx_state)
7070

@@ -237,6 +237,15 @@ def test_multiple_concurrent_thoughts(ctx: Context, output_steps: list[Intermedi
237237
assert output_steps[0].payload.UUID == uuid1
238238
assert output_steps[1].payload.UUID == uuid2
239239

240+
output_steps.clear()
241+
240242
# Pop in reverse order (LIFO)
241243
emit_thought_end(ctx, uuid2)
242244
emit_thought_end(ctx, uuid1)
245+
246+
# Should have 2 END events with correct UUIDs in LIFO order
247+
assert len(output_steps) == 2
248+
assert output_steps[0].payload.UUID == uuid2
249+
assert output_steps[0].payload.event_type == IntermediateStepType.SPAN_END
250+
assert output_steps[1].payload.UUID == uuid1
251+
assert output_steps[1].payload.event_type == IntermediateStepType.SPAN_END

packages/nvidia_nat_core/tests/nat/front_ends/fastapi/test_step_adaptor.py

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -26,14 +26,14 @@
2626
from nat.front_ends.fastapi.step_adaptor import StepAdaptor
2727

2828

29-
@pytest.fixture
30-
def default_config():
29+
@pytest.fixture(name="default_config")
30+
def fixture_default_config():
3131
"""Return a default StepAdaptorConfig object (mode=DEFAULT)."""
3232
return StepAdaptorConfig(mode=StepAdaptorMode.DEFAULT, custom_event_types=[])
3333

3434

35-
@pytest.fixture
36-
def custom_config():
35+
@pytest.fixture(name="custom_config")
36+
def fixture_custom_config():
3737
"""Return a custom StepAdaptorConfig object (mode=CUSTOM) with custom types."""
3838
return StepAdaptorConfig(
3939
mode=StepAdaptorMode.CUSTOM,
@@ -44,8 +44,8 @@ def custom_config():
4444
)
4545

4646

47-
@pytest.fixture
48-
def disabled_config():
47+
@pytest.fixture(name="disabled_config")
48+
def fixture_disabled_config():
4949
"""Return a custom StepAdaptorConfig object that disables intermediate steps."""
5050
return StepAdaptorConfig(
5151
mode=StepAdaptorMode.OFF,
@@ -56,26 +56,26 @@ def disabled_config():
5656
)
5757

5858

59-
@pytest.fixture
60-
def step_adaptor_default(default_config):
59+
@pytest.fixture(name="step_adaptor_default")
60+
def fixture_step_adaptor_default(default_config):
6161
"""Return a StepAdaptor using the default config."""
6262
return StepAdaptor(config=default_config)
6363

6464

65-
@pytest.fixture
66-
def step_adaptor_custom(custom_config):
65+
@pytest.fixture(name="step_adaptor_custom")
66+
def fixture_step_adaptor_custom(custom_config):
6767
"""Return a StepAdaptor using the custom config."""
6868
return StepAdaptor(config=custom_config)
6969

7070

71-
@pytest.fixture
72-
def step_adaptor_disabled(disabled_config):
71+
@pytest.fixture(name="step_adaptor_disabled")
72+
def fixture_step_adaptor_disabled(disabled_config):
7373
"""Return a StepAdaptor using the disabled config."""
7474
return StepAdaptor(config=disabled_config)
7575

7676

77-
@pytest.fixture
78-
def make_intermediate_step():
77+
@pytest.fixture(name="make_intermediate_step")
78+
def fixture_make_intermediate_step():
7979
"""A factory fixture to create an IntermediateStep with minimal defaults."""
8080

8181
def _make_step(event_type: IntermediateStepType, data_input=None, data_output=None, name=None, UUID=None):
@@ -670,7 +670,7 @@ def test_function_events_in_custom_mode(step_adaptor_custom, make_intermediate_s
670670
# --------------------
671671
# Tests for thought_description override
672672
# --------------------
673-
def test_function_start_with_thought_description_override(step_adaptor_default, make_intermediate_step):
673+
def test_function_start_with_thought_description_override(step_adaptor_default):
674674
"""FUNCTION_START events use custom thought_description from metadata when provided."""
675675
custom_thought = "Authenticating user credentials"
676676
payload = IntermediateStepPayload(
@@ -693,7 +693,7 @@ def test_function_start_with_thought_description_override(step_adaptor_default,
693693
assert result.thought_text == f"{custom_thought}..."
694694

695695

696-
def test_function_end_with_thought_description_override(step_adaptor_default, make_intermediate_step):
696+
def test_function_end_with_thought_description_override(step_adaptor_default):
697697
"""FUNCTION_END events use custom thought_description from start event metadata."""
698698
uuid = "function-uuid-custom-end"
699699
custom_thought = "Generating SQL query"
@@ -747,7 +747,7 @@ def test_function_without_thought_description_uses_default(step_adaptor_default,
747747
assert result.thought_text == "Running function: test_function..."
748748

749749

750-
def test_tool_start_with_thought_description_override(step_adaptor_default, make_intermediate_step):
750+
def test_tool_start_with_thought_description_override(step_adaptor_default):
751751
"""TOOL_START events use custom thought_description from metadata when provided."""
752752
custom_thought = "Executing database query"
753753
payload = IntermediateStepPayload(
@@ -770,7 +770,7 @@ def test_tool_start_with_thought_description_override(step_adaptor_default, make
770770
assert result.thought_text == f"{custom_thought}..."
771771

772772

773-
def test_tool_end_with_thought_description_override(step_adaptor_default, make_intermediate_step):
773+
def test_tool_end_with_thought_description_override(step_adaptor_default):
774774
"""TOOL_END events use custom thought_description from start event metadata."""
775775
uuid = "tool-uuid-custom-end"
776776
custom_thought = "Executing database query"

packages/nvidia_nat_langchain/src/nat/plugins/langchain/agent/react_agent/register.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ async def react_agent_workflow(config: ReActAgentWorkflowConfig, builder: Builde
117117
llm=llm,
118118
prompt=prompt,
119119
tools=tools,
120-
callbacks=[LangchainProfilerHandler()],
120+
callbacks=[],
121121
use_tool_schema=config.include_tool_input_schema_in_tool_description,
122122
detailed_logs=config.verbose,
123123
log_response_max_chars=config.log_response_max_chars,
@@ -154,8 +154,12 @@ async def _response_fn(chat_request_or_message: ChatRequestOrMessage) -> ChatRes
154154

155155
state = ReActGraphState(messages=messages)
156156

157-
# run the ReAct Agent Graph
158-
state = await graph.ainvoke(state, config={'recursion_limit': (config.max_tool_calls + 1) * 2})
157+
# run the ReAct Agent Graph with a new callback handler instance per request
158+
state = await graph.ainvoke(state,
159+
config={
160+
'recursion_limit': (config.max_tool_calls + 1) * 2,
161+
'callbacks': [LangchainProfilerHandler()]
162+
})
159163
# setting recursion_limit: 4 allows 1 tool call
160164
# - allows the ReAct Agent to perform 1 cycle / call 1 single tool,
161165
# - but stops the agent when it tries to call a tool a second time

0 commit comments

Comments
 (0)