Skip to content

Commit d390621

Browse files
Merge pull request #33 from insightbuilder/fastmcp-explore
Added research code for AI-related tasks
2 parents 652b663 + baf6473 commit d390621

13 files changed

Lines changed: 2178 additions & 1 deletion

File tree

fastmcp-reddit/.gitignore

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
# Python-generated files
2+
__pycache__/
3+
*.py[oc]
4+
build/
5+
dist/
6+
wheels/
7+
*.egg-info
8+
9+
# Virtual environments
10+
.venv

fastmcp-reddit/.python-version

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
3.11

fastmcp-reddit/README.md

Whitespace-only changes.

fastmcp-reddit/client.py

Lines changed: 178 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,178 @@
1+
import asyncio
2+
from typing import Optional
3+
from contextlib import AsyncExitStack
4+
from inspect import getsource
5+
6+
# pyright: reportMissingImports=false
7+
# pyright: reportOptionalSubscript=false
8+
# pyright: reportOptionalMemberAccess=false
9+
10+
from mcp import ClientSession, StdioServerParameters
11+
from mcp.client.stdio import stdio_client
12+
13+
from anthropic import Anthropic
14+
from dotenv import load_dotenv
15+
16+
load_dotenv() # load environment variables from .env
17+
18+
19+
class MCPClient:
    """Interactive MCP client.

    Spawns a stdio-based MCP server as a subprocess, inspects its tools and
    prompts, and answers user queries by calling Claude with the server's
    tools attached for tool use.
    """

    def __init__(self) -> None:
        # Initialize session and client objects
        self.session: Optional[ClientSession] = None
        # Single exit stack owns the stdio transport and the session so both
        # are released together (in reverse order) by cleanup().
        self.exit_stack = AsyncExitStack()
        # Anthropic() reads its API key from the environment (loaded from
        # .env by the load_dotenv() call at module import time).
        self.anthropic = Anthropic()

    # methods will go here
    async def connect_to_server(self, server_script_path: str) -> None:
        """Connect to an MCP server

        Launches the server script as a subprocess over stdio, initializes
        the MCP session, then prints the server's tools and prompts and
        smoke-tests the "reply_with_context" prompt with dummy arguments.

        Args:
            server_script_path: Path to the server script (.py or .js)

        Raises:
            ValueError: If the path is neither a ``.py`` nor a ``.js`` file.
        """
        print("Starting Server from within client")
        is_python = server_script_path.endswith(".py")
        is_js = server_script_path.endswith(".js")
        if not (is_python or is_js):
            raise ValueError("Server script must be a .py or .js file")

        # Choose the interpreter from the script extension.
        command = "python" if is_python else "node"
        server_params = StdioServerParameters(
            command=command, args=[server_script_path], env=None
        )
        print("This is where the server is running")
        # stdio_client starts the server subprocess and yields a
        # (read_stream, write_stream) pair; entered on the exit stack so the
        # subprocess is torn down in cleanup().
        stdio_transport = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        # print("got the transport...", stdio_transport)
        self.stdio, self.write = stdio_transport
        # The session must be entered AFTER the transport it wraps; the exit
        # stack unwinds in reverse, closing the session before the transport.
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(self.stdio, self.write)
        )

        await self.session.initialize()
        print("session initialized")

        # List available tools
        response = await self.session.list_tools()
        tools = response.tools
        # Need to get tools output if the server is up
        print("\nConnected to server with tools:", [tool.name for tool in tools])

        # below code is used initially for testing the read_resource method
        # resource_test = await self.session.read_resource("subreddit://info")
        # print("Testing Resource in Client side:", resource_test)

        # listing available prompts
        response = await self.session.list_prompts()
        prompts = response.prompts

        # Exercise the server's "reply_with_context" prompt with placeholder
        # arguments and print the rendered first message.
        test_text = await self.session.get_prompt(
            "reply_with_context", arguments={"query": "test", "context": "test_context"}
        )
        print(test_text.messages[0].content.text)
        print("\nAvailable prompts:", [prompt.name for prompt in prompts])

    async def process_query(self, query: str) -> str:
        """Process a query using Claude and available tools

        Sends the query to Claude with the server's tools attached; for each
        tool_use block in the reply, executes the tool via the MCP session,
        feeds the result back, and requests a follow-up completion.

        Args:
            query: Raw user query text.

        Returns:
            All text segments produced during the exchange joined with
            newlines, including "[Calling tool ...]" markers.
        """
        # get the tools
        response = await self.session.list_tools()
        # # get the resources
        # avbl_data = await self.session.read_resource("subreddit://info")
        # # use the resources in the prompt
        # context_prompt = await self.session.get_prompt(
        #     "reply_with_context",
        #     arguments={"context": avbl_data.contents[0].text, "query": query},
        # )
        # query_with_context = context_prompt.messages[0].content
        # print(query_with_context.text)
        # build it into the message list
        messages = [{"role": "user", "content": query}]
        # Convert MCP tool descriptors into the Anthropic tools schema.
        available_tools = [
            {
                "name": tool.name,
                "description": tool.description,
                "input_schema": tool.inputSchema,
            }
            for tool in response.tools
        ]

        # Initial Claude API call
        response = self.anthropic.messages.create(
            model="claude-3-5-haiku-20241022",
            max_tokens=1000,
            messages=messages,
            tools=available_tools,
        )

        # Process response and handle tool calls
        tool_results = []
        final_text = []

        for content in response.content:
            if content.type == "text":
                final_text.append(content.text)
            elif content.type == "tool_use":
                tool_name = content.name
                tool_args = content.input

                # Execute tool call
                result = await self.session.call_tool(tool_name, tool_args)
                tool_results.append({"call": tool_name, "result": result})
                final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")
                print(f"Toolcall Result: {result.content}")
                # Continue conversation with tool results
                if hasattr(content, "text") and content.text:
                    messages.append({"role": "assistant", "content": content.text})
                messages.append({"role": "user", "content": result.content})

                # Get next response from Claude
                # NOTE(review): the follow-up call omits tools=, so Claude
                # cannot chain a second tool call — confirm this is intended.
                response = self.anthropic.messages.create(
                    model="claude-3-5-haiku-20241022",
                    max_tokens=1000,
                    messages=messages,
                )

                final_text.append(response.content[0].text)

        return "\n".join(final_text)

    async def chat_loop(self) -> None:
        """Run an interactive chat loop"""
        print("Type your market research queries or 'quit' to exit.")

        while True:
            try:
                query = input("\nQuery: ").strip()

                if query.lower() == "quit":
                    break

                response = await self.process_query(query)
                print("\n" + response)

            except Exception as e:
                # Report per-query failures and keep the loop alive.
                print(f"\nError: {str(e)}")

    async def cleanup(self) -> None:
        """Clean up resources"""
        # Closes the session and the stdio transport (server subprocess) in
        # reverse order of acquisition.
        await self.exit_stack.aclose()
160+
161+
162+
async def main() -> None:
    """Entry point: connect to the server named on the command line and chat.

    Usage: ``uv run client.py server.py``

    Exits with status 1 when no server script path is supplied.
    """
    # Bug fix: the original relied on ``import sys`` inside the
    # ``if __name__ == "__main__"`` guard, so awaiting main() from an
    # importing module raised NameError. Import locally so main() is
    # self-contained either way.
    import sys

    if len(sys.argv) < 2:
        print("Usage: uv run client.py server.py")
        sys.exit(1)

    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        # Always release the exit stack, even if connect or chat fails.
        await client.cleanup()
173+
174+
175+
# Script entry point: ``python client.py <server_script.py|.js>``.
if __name__ == "__main__":
    # NOTE(review): this import lands in module globals only when the file
    # runs as a script; main() depends on it — confirm import-time callers.
    import sys

    asyncio.run(main())

fastmcp-reddit/client_01.py

Lines changed: 190 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,190 @@
1+
import asyncio
2+
from typing import Optional
3+
from contextlib import AsyncExitStack
4+
from inspect import getsource
5+
6+
# pyright: reportMissingImports=false
7+
# pyright: reportOptionalSubscript=false
8+
# pyright: reportOptionalMemberAccess=false
9+
10+
from mcp import ClientSession, StdioServerParameters
11+
from mcp.client.stdio import stdio_client
12+
13+
from anthropic import Anthropic
14+
from dotenv import load_dotenv
15+
16+
load_dotenv() # load environment variables from .env
17+
18+
19+
class MCPClient:
    """Interactive MCP client (earlier iteration of client.py).

    Spawns a stdio-based MCP server as a subprocess, lists its tools,
    resources, and prompts, and answers user queries by calling Claude with
    the server's tools attached for tool use.
    """

    def __init__(self) -> None:
        # Initialize session and client objects
        self.session: Optional[ClientSession] = None
        # Single exit stack owns the stdio transport and the session so both
        # are released together (in reverse order) by cleanup().
        self.exit_stack = AsyncExitStack()
        # Anthropic() reads its API key from the environment (loaded from
        # .env by the load_dotenv() call at module import time).
        self.anthropic = Anthropic()

    # methods will go here
    async def connect_to_server(self, server_script_path: str) -> None:
        """Connect to an MCP server

        Launches the server script as a subprocess over stdio, initializes
        the MCP session, reads the ``subreddit://info`` resource as a smoke
        test, and prints the server's tools and prompts.

        Args:
            server_script_path: Path to the server script (.py or .js)

        Raises:
            ValueError: If the path is neither a ``.py`` nor a ``.js`` file.
        """
        print("Starting Server from within client")
        is_python = server_script_path.endswith(".py")
        is_js = server_script_path.endswith(".js")
        if not (is_python or is_js):
            raise ValueError("Server script must be a .py or .js file")

        # Choose the interpreter from the script extension.
        command = "python" if is_python else "node"
        server_params = StdioServerParameters(
            command=command, args=[server_script_path], env=None
        )
        print("This is where the server is running")
        # stdio_client starts the server subprocess and yields a
        # (read_stream, write_stream) pair; entered on the exit stack so the
        # subprocess is torn down in cleanup().
        stdio_transport = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        # print("got the transport...", stdio_transport)
        self.stdio, self.write = stdio_transport
        # The session must be entered AFTER the transport it wraps; the exit
        # stack unwinds in reverse, closing the session before the transport.
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(self.stdio, self.write)
        )

        await self.session.initialize()
        print("session initialized")

        # List available tools
        response = await self.session.list_tools()
        tools = response.tools
        # Need to get tools output if the server is up
        print("\nConnected to server with tools:", [tool.name for tool in tools])

        # below code is used initially for testing the read_resource method
        resource_test = await self.session.read_resource("subreddit://info")
        print("Testing Resource in Client side:", resource_test)

        # listing available prompts
        response = await self.session.list_prompts()
        prompts = response.prompts
        print("\nAvailable prompts:", [prompt.name for prompt in prompts])

    async def process_query(self, query: str) -> str:
        """Process a query using Claude and available tools

        Sends the query to Claude with the server's tools attached; for each
        tool_use block in the reply, executes the tool via the MCP session,
        feeds the result back, and requests a follow-up completion.

        Args:
            query: Raw user query text.

        Returns:
            All text segments produced during the exchange joined with
            newlines, including "[Calling tool ...]" markers.
        """
        # get the tools
        response = await self.session.list_tools()

        messages = [{"role": "user", "content": query}]
        # Convert MCP tool descriptors into the Anthropic tools schema.
        available_tools = [
            {
                "name": tool.name,
                "description": tool.description,
                "input_schema": tool.inputSchema,
            }
            for tool in response.tools
        ]

        # Initial Claude API call
        response = self.anthropic.messages.create(
            model="claude-3-5-haiku-20241022",
            max_tokens=1000,
            messages=messages,
            tools=available_tools,
        )

        # Process response and handle tool calls
        tool_results = []
        final_text = []

        for content in response.content:
            if content.type == "text":
                final_text.append(content.text)
            elif content.type == "tool_use":
                tool_name = content.name
                tool_args = content.input

                # Execute tool call
                result = await self.session.call_tool(tool_name, tool_args)
                print(f"Tool result call: {result.content}")
                tool_results.append({"call": tool_name, "result": result})
                final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")

                # Continue conversation with tool results
                if hasattr(content, "text") and content.text:
                    messages.append({"role": "assistant", "content": content.text})
                messages.append({"role": "user", "content": result.content})

                # Get next response from Claude
                # NOTE(review): the follow-up call omits tools=, so Claude
                # cannot chain a second tool call — confirm this is intended.
                response = self.anthropic.messages.create(
                    model="claude-3-5-haiku-20241022",
                    max_tokens=1000,
                    messages=messages,
                )

                final_text.append(response.content[0].text)

        return "\n".join(final_text)

    async def chat_loop(self) -> None:
        """Run an interactive chat loop"""
        print("Type your market research queries or 'quit' to exit.")

        while True:
            try:
                query = input("\nQuery: ").strip()

                if query.lower() == "quit":
                    break

                response = await self.process_query(query)
                print("\n" + response)

            except Exception as e:
                # Report per-query failures and keep the loop alive.
                print(f"\nError: {str(e)}")

    async def cleanup(self) -> None:
        """Clean up resources"""
        # Closes the session and the stdio transport (server subprocess) in
        # reverse order of acquisition.
        await self.exit_stack.aclose()
147+
148+
149+
async def main() -> None:
    """Entry point: connect to the server named on the command line and chat.

    Usage: ``uv run client.py server.py``

    Exits with status 1 when no server script path is supplied.
    """
    # Bug fix: the original relied on ``import sys`` inside the
    # ``if __name__ == "__main__"`` guard, so awaiting main() from an
    # importing module raised NameError. Import locally so main() is
    # self-contained either way.
    import sys

    if len(sys.argv) < 2:
        print("Usage: uv run client.py server.py")
        sys.exit(1)

    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        # Always release the exit stack, even if connect or chat fails.
        await client.cleanup()
160+
161+
162+
# Server code that fails, kept here for reference
163+
# @mcp.tool()
164+
# async def get_subreddit_info(query: str) -> Iterable[ReadResourceContents]:
165+
# async def get_subreddit_info(query: str) -> str:
166+
# """Answer the user query by accessing the subreddit_info from
167+
# get_subreddit resource. Use the reply_with_context prompt"""
168+
169+
# data = await mcp.read_resource("subreddit://info")
170+
# # making the prompt
171+
# prompt = mcp.get_prompt(
172+
# "reply_with_context",
173+
# arguments={"context": data.contents[0].text, "query": query},
174+
# )
175+
# returning the reply.
176+
# return prompt.messages[0].content.text
177+
# return data
178+
# return data[0].content
179+
180+
# @mcp.tool()
181+
# async def get_resources() -> str:
182+
# """Returns the list of resources available with you"""
183+
# resource_list = await mcp.list_resources()
184+
# res_list_str = ",".join([res.name for res in resource_list])
185+
# return f"Available resources with you are: {res_list_str}"
186+
187+
# Script entry point: ``python client_01.py <server_script.py|.js>``.
if __name__ == "__main__":
    # NOTE(review): this import lands in module globals only when the file
    # runs as a script; main() depends on it — confirm import-time callers.
    import sys

    asyncio.run(main())

0 commit comments

Comments
 (0)