-
-
Notifications
You must be signed in to change notification settings - Fork 1.1k
Expand file tree
/
Copy pathexample_custom_tools.py
More file actions
151 lines (134 loc) · 4.95 KB
/
example_custom_tools.py
File metadata and controls
151 lines (134 loc) · 4.95 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
from praisonaiagents import Agent, Task, PraisonAIAgents
from typing import List, Dict, Union
from duckduckgo_search import DDGS
from langchain_community.tools import YouTubeSearchTool
from langchain_community.utilities import WikipediaAPIWrapper
# 1. Tool
def internet_search_tool(query: str, max_results: int = 10) -> List[Dict]:
    """
    Perform a text search using DuckDuckGo.

    Args:
        query (str): The search query.
        max_results (int): Maximum number of results to return (default: 10).

    Returns:
        List[Dict]: A list of dicts with "title", "url", and "snippet" keys.
        Returns an empty list if the search fails.
    """
    try:
        results = []
        ddgs = DDGS()
        for result in ddgs.text(keywords=query, max_results=max_results):
            # Normalize the DDGS result fields to a stable key set,
            # defaulting to "" when a field is missing.
            results.append({
                "title": result.get("title", ""),
                "url": result.get("href", ""),
                "snippet": result.get("body", "")
            })
        return results
    except Exception as e:
        # Best-effort tool: report the failure and return an empty result
        # set rather than crashing the agent workflow.
        print(f"Error during DuckDuckGo search: {e}")
        return []
def youtube_search_tool(query: str, inspect: bool = False, max_results: int = 2):
    """
    Provide a custom wrapper around the YouTubeSearchTool.

    Args:
        query (str): The search query for YouTube.
        inspect (bool): If True, returns tool inspection info instead of search results.
        max_results (int): Maximum number of results to return (default: 2).

    Returns:
        Union[List[str], dict]: List of YouTube video URLs or tool inspection info.
    """
    tool = YouTubeSearchTool()

    if not inspect:
        # The underlying tool takes "<query>, <count>" as one comma-separated string.
        return tool.run(f"{query}, {max_results}")

    # Inspection mode: surface the wrapped tool's public surface for debugging.
    public_attrs = [attr for attr in dir(tool) if not attr.startswith('_')]
    return {
        "type": type(tool),
        "attributes": public_attrs,
        "methods": {
            "run": getattr(tool, 'run', None),
            "arun": getattr(tool, 'arun', None)
        },
        "properties": {
            "name": getattr(tool, 'name', 'youtube_search'),
            "description": getattr(tool, 'description', 'Search YouTube videos'),
            "return_direct": getattr(tool, 'return_direct', False)
        }
    }
def wikipedia_search_tool(query: str, inspect: bool = False, max_chars: int = 4000, top_k: int = 3, lang: str = 'en'):
    """
    Provide a custom wrapper around langchain_community's WikipediaAPIWrapper.

    Args:
        query (str): A search query for Wikipedia.
        inspect (bool): If True, returns tool inspection info instead of search results.
        max_chars (int): Maximum characters to return (default: 4000).
        top_k (int): Number of top results to consider (default: 3).
        lang (str): Wikipedia language edition to query (default: 'en').

    Returns:
        Union[str, dict]: Summary from Wikipedia, or tool inspection info if
        inspect=True. On search failure, returns an error message string
        instead of raising.
    """
    w = WikipediaAPIWrapper(
        top_k_results=top_k,
        doc_content_chars_max=max_chars,
        lang=lang
    )
    if inspect:
        # Inspection mode: describe the wrapped tool instead of querying.
        inspection_info = {
            "type": type(w),
            "attributes": [attr for attr in dir(w) if not attr.startswith('_')],
            "methods": {
                "run": getattr(w, 'run', None),
                "arun": getattr(w, 'arun', None)
            },
            "properties": {
                "name": "wikipedia",
                "description": "Search and get summaries from Wikipedia",
                "top_k": w.top_k_results,
                "lang": w.lang,
                "max_chars": w.doc_content_chars_max
            }
        }
        return inspection_info
    try:
        result = w.run(query)
        return result
    except Exception as e:
        # Best-effort tool: return the error text so the agent can react
        # rather than aborting the workflow.
        return f"Error searching Wikipedia: {str(e)}"
# 2. Agent
# Single agent that owns all three search tools; self-reflection is disabled
# so each task completes in one pass.
data_agent = Agent(
    name="DataCollector",
    role="Search Specialist",
    goal="Perform internet searches to collect relevant information.",
    backstory="Expert in finding and organizing internet data from multiple sources.",
    tools=[internet_search_tool, youtube_search_tool, wikipedia_search_tool],
    self_reflect=False
)

# 3. Tasks
# Entry-point task: runs the internet search, then hands off to validation.
collect_task = Task(
    description="Perform an internet search using the query: 'AI job trends in 2024'. Return results as a list of title, URL, and snippet.",
    expected_output="List of search results with titles, URLs, and snippets.",
    agent=data_agent,
    name="collect_data",
    is_start=True,  # marks this as the workflow's first task
    next_tasks=["validate_data"]
)

# Decision task: its 'valid'/'invalid' output selects the next step via
# the `condition` map below.
validate_task = Task(
    description="""Validate the collected data. Check if:
1. At least 5 results are returned.
2. Each result contains a title and a URL.
Return validation_result as 'valid' or 'invalid' only no other text.""",
    expected_output="Validation result indicating if data is valid or invalid.",
    agent=data_agent,
    name="validate_data",
    task_type="decision",
    condition={
        "valid": [],  # End the workflow on valid data
        "invalid": ["collect_data"]  # Retry data collection on invalid data
    },
)

# 4. Workflow
# Wire the agent and tasks into a workflow-process run and start it.
agents = PraisonAIAgents(
    agents=[data_agent],
    tasks=[collect_task, validate_task],
    verbose=1,
    process="workflow"
)
agents.start()