Skip to content

Commit 7ba0554

Browse files
authored
Merge pull request #9 from UiPath/test/add_init_tests
tests: add cli init config tests
2 parents be48e88 + 97d6ed0 commit 7ba0554

5 files changed

Lines changed: 255 additions & 6 deletions

File tree

samples/quickstart-agent/uv.lock

Lines changed: 5 additions & 5 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

tests/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
"""
2+
Test suite for UiPath LlamaIndex SDK.
3+
Contains test cases for all CLI commands and utilities.
4+
"""

tests/cli/test_init.py

Lines changed: 227 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,227 @@
1+
import json
2+
import os
3+
4+
from click.testing import CliRunner
5+
from uipath._cli.cli_init import init # type: ignore
6+
7+
8+
class TestInit:
    """Tests for the ``init`` CLI command.

    Each test materializes a minimal LlamaIndex workflow project
    (``main.py`` + ``llama_index.json``) inside an isolated filesystem,
    runs ``init``, and verifies the generated ``uipath.json`` entry-point
    schemas and bindings.
    """

    # llama_index.json shared by every scenario: a single workflow named
    # "agent" exported from main.py.
    LLAMA_CONFIG = """
{
    "dependencies": ["."],
    "workflows": {
        "agent": "main.py:agent"
    },
    "env": ".env"
}
"""

    @staticmethod
    def _write_project(script_content: str) -> None:
        """Write ``main.py`` and the shared ``llama_index.json`` to the CWD."""
        with open("main.py", "w") as f:
            f.write(script_content)
        with open("llama_index.json", "w") as f:
            f.write(TestInit.LLAMA_CONFIG)

    @staticmethod
    def _run_init(runner: CliRunner) -> dict:
        """Invoke ``init``, assert it succeeded, and return parsed uipath.json."""
        result = runner.invoke(init)
        assert result.exit_code == 0
        assert os.path.exists("uipath.json")
        with open("uipath.json", "r") as f:
            return json.load(f)

    @staticmethod
    def _assert_entry_and_bindings(config: dict) -> dict:
        """Assert the config skeleton common to all scenarios.

        Returns the first entry point so callers can make scenario-specific
        assertions about its input/output schemas.
        """
        assert "entryPoints" in config
        assert "bindings" in config

        # Verify entryPoints properties
        entry = config["entryPoints"][0]
        assert entry["filePath"] == "agent"
        assert entry["type"] == "agent"

        # Both schemas must be JSON-schema object definitions with
        # properties/required sections.
        for section in ("input", "output"):
            assert section in entry
            schema = entry[section]
            assert schema["type"] == "object"
            assert "properties" in schema
            assert "required" in schema

        # Verify bindings
        assert config["bindings"]["version"] == "2.0"
        assert "resources" in config["bindings"]
        assert isinstance(config["bindings"]["resources"], list)
        return entry

    def test_init_basic_config_generation(
        self, runner: CliRunner, temp_dir: str
    ) -> None:
        """Test configuration file generation with StartEvent and StopEvent."""
        with runner.isolated_filesystem(temp_dir=temp_dir):
            # Workflow using the generic StartEvent/StopEvent types.
            script_content = """
from llama_index.core.workflow import (
    Event,
    StartEvent,
    StopEvent,
    Workflow,
    step,
)
from llama_index.llms.openai import OpenAI

class JokeEvent(Event):
    joke: str

class JokeFlow(Workflow):
    llm = OpenAI()

    @step
    async def generate_joke(self, ev: StartEvent) -> JokeEvent:
        topic = ev.topic

        prompt = f"Write your best joke about {topic}."
        response = await self.llm.acomplete(prompt)
        return JokeEvent(joke=str(response))

    @step
    async def critique_joke(self, ev: JokeEvent) -> StopEvent:
        joke = ev.joke

        prompt = f"Give a thorough analysis and critique of the following joke: {joke}"
        response = await self.llm.acomplete(prompt)
        return StopEvent(result=str(response))

agent = JokeFlow(timeout=60, verbose=False)
"""
            self._write_project(script_content)
            config = self._run_init(runner)
            entry = self._assert_entry_and_bindings(config)

            # Generic events yield open-ended object schemas — only the
            # container shapes can be asserted here.
            assert isinstance(entry["input"]["properties"], dict)
            assert isinstance(entry["input"]["required"], list)

    def test_init_custom_config_generation(
        self, runner: CliRunner, temp_dir: str
    ) -> None:
        """Test configuration file generation with custom StartEvent and StopEvent."""
        with runner.isolated_filesystem(temp_dir=temp_dir):
            # Workflow using typed StartEvent/StopEvent subclasses, including
            # optional fields, so generated schemas have concrete properties.
            script_content = """
from typing import Optional

from llama_index.core.workflow import (
    Event,
    StartEvent,
    StopEvent,
    Workflow,
    step,
)
from llama_index.llms.openai import OpenAI

class TopicEvent(StartEvent):
    topic: str
    param: Optional[str] = None

class JokeEvent(Event):
    joke: str

class CritiqueEvent(StopEvent):
    joke: str
    critique: str
    param: Optional[str] = None

class JokeFlow(Workflow):
    llm = OpenAI()

    @step
    async def generate_joke(self, ev: TopicEvent) -> JokeEvent:
        topic = ev.topic

        prompt = f"Write your best joke about {topic}."
        response = await self.llm.acomplete(prompt)
        return JokeEvent(joke=str(response))

    @step
    async def critique_joke(self, ev: JokeEvent) -> CritiqueEvent:
        joke = ev.joke

        prompt = f"Give a thorough analysis and critique of the following joke: {joke}"
        response = await self.llm.acomplete(prompt)
        return CritiqueEvent(joke=joke, critique=str(response))

agent = JokeFlow(timeout=60, verbose=False)
"""
            self._write_project(script_content)
            config = self._run_init(runner)
            entry = self._assert_entry_and_bindings(config)

            # Verify input properties: required "topic", optional "param".
            props = entry["input"]["properties"]
            assert "topic" in props
            assert props["topic"]["type"] == "string"

            assert "param" in props
            assert props["param"]["type"] == "string"
            assert props["param"]["nullable"]

            # Verify required fields in input
            assert entry["input"]["required"] == ["topic"]

            # Verify output properties: required "joke"/"critique",
            # optional "param".
            out_props = entry["output"]["properties"]
            assert "joke" in out_props
            assert out_props["joke"]["type"] == "string"

            assert "critique" in out_props
            assert out_props["critique"]["type"] == "string"

            assert "param" in out_props
            assert out_props["param"]["type"] == "string"
            assert out_props["param"]["nullable"]

            # Verify required fields in output
            assert entry["output"]["required"] == ["joke", "critique"]

tests/conftest.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
import tempfile
2+
from typing import Generator
3+
4+
import pytest
5+
from click.testing import CliRunner
6+
7+
8+
@pytest.fixture
def runner() -> CliRunner:
    """Return a fresh Click test runner for invoking CLI commands."""
    cli_runner = CliRunner()
    return cli_runner
12+
13+
14+
@pytest.fixture
def temp_dir() -> Generator[str, None, None]:
    """Yield a throwaway directory path, cleaned up when the test ends."""
    with tempfile.TemporaryDirectory() as scratch_dir:
        yield scratch_dir

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)