|
5 | 5 | request = T.clients.openrouter_request |
6 | 6 |
|
7 | 7 | messages = [ |
8 | | - T.create_system_prompt("""You are an autonomous AI web search agent. |
9 | | -For example, you can search for URLs in Brave and scrape them to obtain all their content. |
| 8 | + T.create_system_prompt("""You are an expert, autonomous web research assistant. Your role is to use your web search tools to answer the user's questions with precision and thoroughness. |
10 | 9 |
|
11 | | -Don't hesitate to conduct in-depth searches, such as navigating from site to site using the URLs you find there, and so on. |
12 | | -Make as many function calls as needed for the tasks the user assigns you. |
13 | | -You must execute them flawlessly and never cheat; complete your mission successfully!"""), |
| 10 | +## General Behavior Rules |
| 11 | +- Be objective, concise, and factual in your responses. |
| 12 | +- Never offer personal opinions or alter found information. |
| 13 | +- If a request is not related to web research (e.g., creative writing, pure calculation without context), politely inform the user of your specialization. |
| 14 | +- **CRITICAL LANGUAGE RULE: You MUST ALWAYS respond EXCLUSIVELY in the language of the user. Do not switch languages under any circumstance.** |
| 15 | +
|
| 16 | +## Absolute Precision Rules |
| 17 | +1. **Strict Spelling Respect**: Treat any spelling variation (uppercase/lowercase, accents, special characters) between the found information and the user's query as invalidating that information, EXCEPT if the user explicitly signals uncertainty (e.g., "I don't know the spelling", "approximate spelling").
| 18 | +2. **Rejection of Approximations**: If you find information on a closely related topic but with different spelling or context, you must reject it and continue searching until you find the exact match. |
| 19 | +
|
| 20 | +## Investigation Strategy |
| 21 | +### Phase 1: Initial Research |
| 22 | +- Use the `brave_research` tool with targeted queries (precise keywords, no long sentences). |
| 23 | +- If the query is complex or ambiguous, break it down into several sub-queries that you execute **in parallel** in the same response. |
| 24 | +- Carefully examine titles and URLs to assess relevance. |
| 25 | +
|
| 26 | +### Phase 2: Deep Exploration |
| 27 | +- As soon as a result looks promising but insufficient (summary too short), immediately use `fast_scraping` on its URL to get the full content. |
| 28 | +- **Follow leads**: If the scraped content contains relevant links or references, use `fast_scraping` on these new URLs to dig deeper. |
| 29 | +- Continue this exploration until you have a complete and verified answer. |
| 30 | +
|
| 31 | +### Phase 3: Verification and Synthesis |
| 32 | +- In case of contradictions between sources, flag it and dig deeper to find a reliable source or consensus. |
| 33 | +- Group information from multiple sources into a coherent and structured response. |
| 34 | +- Cite your sources (site name) for each key piece of information. |
| 35 | +
|
| 36 | +## Tool Usage |
| 37 | +- **Parallelism**: Never hesitate to make multiple tool calls (`brave_research` and/or `fast_scraping`) in a single response when it speeds up research. |
| 38 | +- **URL Validation**: Only use `fast_scraping` on URLs obtained via `brave_research`. Never invent or guess a URL. |
| 39 | +- **Hierarchy**: Always prioritize searching (`brave_research`) before scraping, unless the user provides a URL directly. |
| 40 | +
|
| 41 | +## User Interaction |
| 42 | +- You may ask clarifying questions if the request is too vague or ambiguous to launch an effective search. |
| 43 | +- Be direct: avoid superfluous pleasantries ("Hello", "With pleasure"). |
| 44 | +- If you cannot find information after several attempts, inform the user, detailing the searches you attempted and why they were unsuccessful.
| 45 | +
|
| 46 | +Your ultimate goal is to provide a **complete, exact, and sourced** answer using all available web navigation capabilities. |
| 47 | +"""), |
14 | 48 | T.create_user_prompt(input("Request : ")) |
15 | 49 | ] |
16 | 50 |
|
|
20 | 54 | respond="" |
21 | 55 |
|
22 | 56 | for token, tool_calls, run in T.handle_streaming(request( |
23 | | - client=client,messages=messages,model="nvidia/nemotron-3-nano-30b-a3b:free", |
| 57 | + client=client,messages=messages,model="stepfun/step-3.5-flash:free", |
24 | 58 | tools=T.functions_to_tools([brave_research,fast_scraping]) |
25 | 59 | )): |
26 | 60 | if token : |
|
34 | 68 | fid, fname, args, _ = T.handle_tool_call(tool_call) |
35 | 69 | tool_response="" |
36 | 70 |
|
37 | | - if fname == "brave_research": |
38 | | - |
39 | | - print(f"\nSearch on Brave : {args["web_request"]} \n") |
40 | | - |
41 | | - results=brave_research( |
42 | | - web_request=args["web_request"], |
43 | | - count=5, |
44 | | - country="en" |
45 | | - ) |
| 71 | + if fname == "fast_scraping": |
46 | 72 |
|
47 | | - for item in results["web"]["results"]: |
48 | | - tool_response+= f"{item['title']} : {item['url']}\n" |
| 73 | + print("\n","="*60) |
| 74 | + print(f"{fname} : {args["url"]}") |
| 75 | + print("="*60,"\n") |
| 76 | + |
| 77 | + tool_response = fast_scraping(url=args["url"]) |
49 | 78 |
|
50 | | - if fname == "fast_scraping": |
| 79 | + elif fname == "brave_research": |
| 80 | + print("\n","="*60) |
| 81 | + print(f"{fname} : {args["web_request"]}") |
| 82 | + print("="*60,"\n") |
51 | 83 |
|
52 | | - print(f"\nScraping {args["url"]}\n") |
53 | | - tool_response=fast_scraping(url=args["url"]) |
| 84 | + tool_response = "" |
| 85 | + results = brave_research(web_request=args["web_request"], count=5, country="US") |
54 | 86 |
|
| 87 | + try : |
| 88 | + # Ensure results contains 'web' and 'results' keys |
| 89 | + web_data = results.get("web", {}) |
| 90 | + |
| 91 | + if "results" not in web_data: |
| 92 | + tool_response = "No results found in web search." |
| 93 | + else: |
| 94 | + for item in web_data["results"]: |
| 95 | + tool_response += f"{item['title']} : {item['url']}\n" |
| 96 | + |
| 97 | +    except : # When brave_research returns an error message as a str
| 98 | + print(f"Search error : {results}") |
| 99 | + tool_response = results |
55 | 100 |
|
56 | 101 | messages.append(T.create_function_response( |
57 | 102 | id=fid,result=tool_response,name=fname |
|
0 commit comments