diff --git a/docs/mcp/aws-kb-retrieval.mdx b/docs/mcp/aws-kb-retrieval.mdx
new file mode 100644
index 000000000..7b6f73e77
--- /dev/null
+++ b/docs/mcp/aws-kb-retrieval.mdx
@@ -0,0 +1,84 @@
+---
+title: "AWS KB Retrieval MCP Integration"
+sidebarTitle: "AWS KB Retrieval"
+description: "Guide for integrating AWS Knowledge Base retrieval capabilities with PraisonAI agents using MCP"
+icon: "aws"
+---
+
+## Add AWS KB Retrieval Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[AWS KB Retrieval MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#FF9900,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your AWS credentials as environment variables in your terminal:
+ ```bash
+ export AWS_ACCESS_KEY_ID=your_aws_access_key_id_here
+ export AWS_SECRET_ACCESS_KEY=your_aws_secret_access_key_here
+ export AWS_REGION=your_aws_region_here
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `aws_kb_retrieval.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Get AWS credentials from environment
+ aws_access_key = os.getenv("AWS_ACCESS_KEY_ID")
+ aws_secret_key = os.getenv("AWS_SECRET_ACCESS_KEY")
+ aws_region = os.getenv("AWS_REGION")
+
+ # Use a single string command with AWS KB Retrieval configuration
+ aws_kb_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with AWS Knowledge Base.
+ Use the available tools when relevant to retrieve and process AWS information.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-aws-kb-retrieval",
+ env={
+ "AWS_ACCESS_KEY_ID": aws_access_key,
+ "AWS_SECRET_ACCESS_KEY": aws_secret_key,
+ "AWS_REGION": aws_region
+ })
+ )
+
+ aws_kb_agent.start("Search AWS documentation about EC2 instances")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python aws_kb_retrieval.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - AWS credentials (Access Key ID, Secret Access Key, and Region)
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/bravesearch.mdx b/docs/mcp/bravesearch.mdx
index 3906f638c..23f2a33fe 100644
--- a/docs/mcp/bravesearch.mdx
+++ b/docs/mcp/bravesearch.mdx
@@ -5,14 +5,14 @@ description: "Guide for integrating Brave Search capabilities with PraisonAI age
icon: "searchengin"
---
-# Brave Search MCP Integration
+## Add Brave Search Tool to AI Agent
```mermaid
flowchart LR
- In[In] --> Agent[AI Agent]
+ In[Query] --> Agent[AI Agent]
Agent --> Tool[Brave Search MCP]
Tool --> Agent
- Agent --> Out[Out]
+ Agent --> Out[Answer]
style In fill:#8B0000,color:#fff
style Agent fill:#2E8B57,color:#fff
@@ -23,6 +23,12 @@ flowchart LR
## Quick Start
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
Set your Brave Search API key as an environment variable in your terminal:
```bash
@@ -54,13 +60,6 @@ flowchart LR
```
-
- Make sure you have Node.js installed, as the MCP server requires it:
- ```bash
- pip install praisonaiagents
- ```
-
-
Execute your script:
```bash
@@ -76,20 +75,3 @@ flowchart LR
- Brave Search API key
- OpenAI API key (for the agent's LLM)
-
-## Features
-
-
-
- Search the web for up-to-date information.
-
-
- Seamless integration with Model Context Protocol.
-
-
- Secure API key handling through environment variables.
-
-
- Leverages the official Brave Search MCP server package.
-
-
diff --git a/docs/mcp/custom-python-client.mdx b/docs/mcp/custom-python-client.mdx
new file mode 100644
index 000000000..3a8d1bcc0
--- /dev/null
+++ b/docs/mcp/custom-python-client.mdx
@@ -0,0 +1,103 @@
+---
+title: "Custom Python MCP Client"
+sidebarTitle: "Custom Python Client"
+description: "Guide for creating a client to interact with a custom Python MCP server"
+icon: "person-digging"
+---
+
+## Custom Python MCP Client
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Client[Python MCP Client]
+ Client --> Server[Python MCP Server]
+ Server --> Client
+ Client --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Client fill:#3776AB,color:#fff
+ style Server fill:#3776AB,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Overview
+
+The Custom Python MCP Client demonstrates how to integrate a custom Python MCP server with a PraisonAI agent. This client connects to a stock price MCP server to retrieve real-time stock information.
+
+## Quick Start
+
+
+
+ Install the required packages:
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ First, set up the Custom Python MCP Server.
+
+
+ Save the code below to a file named `custom-python-client.py`.
+```python
+from praisonaiagents import Agent, MCP
+
+agent = Agent(
+ instructions="""You are a helpful assistant that can check stock prices and perform other tasks.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools = MCP("/Users/praison/miniconda3/envs/mcp/bin/python /Users/praison/stockprice/custom-python-server.py")
+)
+
+# NOTE: Python Path replace with yours: /Users/praison/miniconda3/envs/mcp/bin/python
+# NOTE: custom-python-server.py file path, replace it with yours: /Users/praison/stockprice/custom-python-server.py
+
+agent.start("What is the stock price of Tesla?")
+```
+
+
+ Execute the client script:
+ ```bash
+ python custom-python-client.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - praisonaiagents and mcp packages
+ - A properly configured custom Python MCP server
+ - OpenAI API key (for the agent's LLM)
+
+
+## Environment Variables
+
+For better security and flexibility, you can modify the client to use environment variables:
+
+```python
+import os
+from praisonaiagents import Agent, MCP
+
+# Get paths from environment variables or use defaults
+python_path = os.getenv("PYTHON_PATH", "/path/to/python")
+server_path = os.getenv("SERVER_PATH", "/path/to/server.py")
+
+agent = Agent(
+ instructions="""You are a helpful assistant that can check stock prices and perform other tasks.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools=MCP(f"{python_path} {server_path}")
+)
+
+agent.start("What is the stock price of Tesla?")
+```
+
+This approach allows you to set the paths using environment variables:
+
+```bash
+export PYTHON_PATH=/Users/praison/miniconda3/envs/mcp/bin/python
+export SERVER_PATH=/Users/praison/stockprice/app.py
+```
\ No newline at end of file
diff --git a/docs/mcp/custom-python-server.mdx b/docs/mcp/custom-python-server.mdx
new file mode 100644
index 000000000..0b1ec83aa
--- /dev/null
+++ b/docs/mcp/custom-python-server.mdx
@@ -0,0 +1,86 @@
+---
+title: "Custom Python MCP Server"
+sidebarTitle: "Custom Python Server"
+description: "Guide for creating a custom Python MCP server for stock price retrieval"
+icon: "server"
+---
+
+## Custom Python MCP Server
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Client[Python MCP Client]
+ Client --> Server[Python MCP Server]
+ Server --> Client
+ Client --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Client fill:#3776AB,color:#fff
+ style Server fill:#3776AB,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Overview
+
+The Custom Python MCP Server is a simple implementation of the Model Context Protocol (MCP) that provides stock price information using the yfinance library. This server can be used with PraisonAI agents to retrieve real-time stock prices.
+
+## Server Implementation
+
+The complete implementation of the custom Python MCP server is included in the Quick Start steps below:
+
+
+## Quick Start
+
+
+
+ Install the required packages:
+ ```bash
+ pip install yfinance mcp
+ ```
+
+
+ Save the code below to a file named `custom-python-server.py`.
+```python
+import yfinance as yf
+from mcp.server.fastmcp import FastMCP
+
+mcp = FastMCP("stock_prices")
+
+@mcp.tool()
+async def get_stock_price(ticker: str) -> str:
+ """Get the current stock price for a given ticker symbol.
+
+ Args:
+ ticker: Stock ticker symbol (e.g., AAPL, MSFT, GOOG)
+
+ Returns:
+ Current stock price as a string
+ """
+ if not ticker:
+ return "No ticker provided"
+ try:
+ stock = yf.Ticker(ticker)
+ info = stock.info
+ current_price = info.get('currentPrice') or info.get('regularMarketPrice')
+ if not current_price:
+ return f"Could not retrieve price for {ticker}"
+ return f"${current_price:.2f}"
+
+ except Exception as e:
+ return f"Error: {str(e)}"
+
+if __name__ == "__main__":
+ mcp.run(transport='stdio')
+```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - yfinance package
+ - mcp package
+
diff --git a/docs/mcp/custom.mdx b/docs/mcp/custom.mdx
index 38344b015..c3a63a083 100644
--- a/docs/mcp/custom.mdx
+++ b/docs/mcp/custom.mdx
@@ -25,71 +25,69 @@ flowchart LR
Create a new file `app.py` with your custom MCP server implementation:
- ```python
- import yfinance as yf
- from mcp.server.fastmcp import FastMCP
-
- mcp = FastMCP("stock_prices")
-
- @mcp.tool()
- async def get_stock_price(ticker: str) -> str:
- """Get the current stock price for a given ticker symbol.
-
- Args:
- ticker: Stock ticker symbol (e.g., AAPL, MSFT, GOOG)
-
- Returns:
- Current stock price as a string
- """
- if not ticker:
- return "No ticker provided"
- try:
- stock = yf.Ticker(ticker)
- info = stock.info
- current_price = info.get('currentPrice') or info.get('regularMarketPrice')
- if not current_price:
- return f"Could not retrieve price for {ticker}"
- return f"${current_price:.2f}"
-
- except Exception as e:
- return f"Error: {str(e)}"
-
- if __name__ == "__main__":
- mcp.run(transport='stdio')
- ```
+```python
+import yfinance as yf
+from mcp.server.fastmcp import FastMCP
+
+mcp = FastMCP("stock_prices")
+
+@mcp.tool()
+async def get_stock_price(ticker: str) -> str:
+ """Get the current stock price for a given ticker symbol.
+
+ Args:
+ ticker: Stock ticker symbol (e.g., AAPL, MSFT, GOOG)
+
+ Returns:
+ Current stock price as a string
+ """
+ if not ticker:
+ return "No ticker provided"
+ try:
+ stock = yf.Ticker(ticker)
+ info = stock.info
+ current_price = info.get('currentPrice') or info.get('regularMarketPrice')
+ if not current_price:
+ return f"Could not retrieve price for {ticker}"
+ return f"${current_price:.2f}"
+
+ except Exception as e:
+ return f"Error: {str(e)}"
+
+if __name__ == "__main__":
+ mcp.run(transport='stdio')
+```
Install the required dependencies in a conda environment:
```bash
- conda create -n mcp python=3.10
- conda activate mcp
- pip install yfinance mcp-python-sdk
+ pip install yfinance mcp
```
Create a new file `stock_agent.py` with the following code:
- ```python
- from praisonaiagents import Agent, MCP
+```python
+from praisonaiagents import Agent, MCP
- agent = Agent(
- instructions="""You are a helpful assistant that can check stock prices and perform other tasks.
- Use the available tools when relevant to answer user questions.""",
- llm="gpt-4o-mini",
- tools = MCP("/path/to/python /path/to/app.py")
- )
+agent = Agent(
+ instructions="""You are a helpful assistant that can check stock prices and perform other tasks.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools = MCP("/path/to/python /path/to/app.py")
+)
- # NOTE: Replace with your actual Python path and app.py file path
+# NOTE: Replace with your actual Python path and app.py file path
- agent.start("What is the stock price of Tesla?")
- ```
+agent.start("What is the stock price of Tesla?")
+```
Execute your script:
```bash
- zsh -c "source $(conda info --base)/etc/profile.d/conda.sh && conda activate windsurf && python stock_agent.py"
+ python stock_agent.py
```
diff --git a/docs/mcp/everart.mdx b/docs/mcp/everart.mdx
new file mode 100644
index 000000000..2d520f1c8
--- /dev/null
+++ b/docs/mcp/everart.mdx
@@ -0,0 +1,76 @@
+---
+title: "Everart MCP Integration"
+sidebarTitle: "Everart"
+description: "Guide for integrating Everart AI art generation capabilities with PraisonAI agents using MCP"
+icon: "palette"
+---
+
+## Add Everart Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Everart MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#FF6B6B,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your Everart API key as an environment variable in your terminal:
+ ```bash
+ export EVERART_API_KEY=your_everart_api_key_here
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `everart_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Get Everart API key from environment
+ everart_api_key = os.getenv("EVERART_API_KEY")
+
+ # Use a single string command with Everart configuration
+ everart_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Everart.
+ Use the available tools when relevant to generate and manage art.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-everart",
+ env={"EVERART_API_KEY": everart_api_key})
+ )
+
+ everart_agent.start("Generate an artistic image of a sunset")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python everart_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - Everart API key
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/filesystem.mdx b/docs/mcp/filesystem.mdx
new file mode 100644
index 000000000..994b3a883
--- /dev/null
+++ b/docs/mcp/filesystem.mdx
@@ -0,0 +1,77 @@
+---
+title: "Filesystem MCP Integration"
+sidebarTitle: "Filesystem"
+description: "Guide for integrating filesystem operations with PraisonAI agents using MCP"
+icon: "folder-open"
+---
+
+## Add Filesystem Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Filesystem MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#FFA500,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your OpenAI API key as an environment variable in your terminal:
+ ```bash
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `filesystem_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Define allowed directories for filesystem access
+ allowed_dirs = [
+ "/Users/username/Desktop",
+ "/path/to/other/allowed/dir"
+ ]
+
+ # Use a single string command with allowed directories
+ filesystem_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with the filesystem.
+ Use the available tools when relevant to manage files and directories.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-filesystem", args=allowed_dirs)
+ )
+
+ filesystem_agent.start("List files in the allowed directories")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python filesystem_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - OpenAI API key (for the agent's LLM)
+ - Read/write access to the specified directories
+
diff --git a/docs/mcp/gdrive.mdx b/docs/mcp/gdrive.mdx
new file mode 100644
index 000000000..b7208f137
--- /dev/null
+++ b/docs/mcp/gdrive.mdx
@@ -0,0 +1,76 @@
+---
+title: "Google Drive MCP Integration"
+sidebarTitle: "Google Drive"
+description: "Guide for integrating Google Drive file management with PraisonAI agents using MCP"
+icon: "google-drive"
+---
+
+## Add Google Drive Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Google Drive MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#0F9D58,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your Google Drive credentials path as an environment variable in your terminal:
+ ```bash
+ export GDRIVE_CREDENTIALS_PATH=path/to/your/gcp-oauth.keys.json
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `gdrive_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Get the credentials path from environment
+ gdrive_credentials = os.getenv("GDRIVE_CREDENTIALS_PATH", "servers/gcp-oauth.keys.json")
+
+ # Use a single string command with Google Drive configuration
+ gdrive_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Google Drive.
+ Use the available tools when relevant to manage files and folders.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-gdrive",
+ env={"GDRIVE_CREDENTIALS_PATH": gdrive_credentials})
+ )
+
+ gdrive_agent.start("List files in my Google Drive")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python gdrive_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - Google Drive API credentials (OAuth keys)
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/github.mdx b/docs/mcp/github.mdx
new file mode 100644
index 000000000..2ba63dbcb
--- /dev/null
+++ b/docs/mcp/github.mdx
@@ -0,0 +1,76 @@
+---
+title: "GitHub MCP Integration"
+sidebarTitle: "GitHub"
+description: "Guide for integrating GitHub repository management with PraisonAI agents using MCP"
+icon: "github"
+---
+
+## Add GitHub Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[GitHub MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#181717,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your GitHub Personal Access Token as an environment variable in your terminal:
+ ```bash
+ export GITHUB_PERSONAL_ACCESS_TOKEN=your_github_token_here
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `github_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Use the API key from environment or set it directly
+ github_token = os.getenv("GITHUB_PERSONAL_ACCESS_TOKEN")
+
+ # Use a single string command with environment variables
+ github_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with GitHub.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-github",
+ env={"GITHUB_PERSONAL_ACCESS_TOKEN": github_token})
+ )
+
+ github_agent.start("List my GitHub repositories")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python github_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - GitHub Personal Access Token
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/gitlab.mdx b/docs/mcp/gitlab.mdx
new file mode 100644
index 000000000..06c7d7667
--- /dev/null
+++ b/docs/mcp/gitlab.mdx
@@ -0,0 +1,81 @@
+---
+title: "GitLab MCP Integration"
+sidebarTitle: "GitLab"
+description: "Guide for integrating GitLab repository management with PraisonAI agents using MCP"
+icon: "gitlab"
+---
+
+## Add GitLab Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[GitLab MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#FC6D26,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your GitLab credentials as environment variables in your terminal:
+ ```bash
+ export GITLAB_PERSONAL_ACCESS_TOKEN=your_gitlab_token_here
+ export GITLAB_API_URL=https://gitlab.com/api/v4
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `gitlab_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Use the API token and URL from environment or set directly
+ gitlab_token = os.getenv("GITLAB_PERSONAL_ACCESS_TOKEN")
+ gitlab_api_url = os.getenv("GITLAB_API_URL", "https://gitlab.com/api/v4")
+
+ # Use a single string command with environment variables
+ gitlab_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with GitLab.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-gitlab",
+ env={
+ "GITLAB_PERSONAL_ACCESS_TOKEN": gitlab_token,
+ "GITLAB_API_URL": gitlab_api_url
+ })
+ )
+
+ gitlab_agent.start("List my GitLab projects")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python gitlab_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - GitLab Personal Access Token
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/google-maps.mdx b/docs/mcp/google-maps.mdx
new file mode 100644
index 000000000..82a7a2bc9
--- /dev/null
+++ b/docs/mcp/google-maps.mdx
@@ -0,0 +1,76 @@
+---
+title: "Google Maps MCP Integration"
+sidebarTitle: "Google Maps"
+description: "Guide for integrating Google Maps location services with PraisonAI agents using MCP"
+icon: "map"
+---
+
+## Add Google Maps Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Google Maps MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#4285F4,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your Google Maps API key as an environment variable in your terminal:
+ ```bash
+ export GOOGLE_MAPS_API_KEY=your_google_maps_api_key_here
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `google_maps_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Get the API key from environment
+ maps_api_key = os.getenv("GOOGLE_MAPS_API_KEY")
+
+ # Use a single string command with Google Maps configuration
+ maps_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Google Maps.
+ Use the available tools when relevant to handle location-based queries.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-google-maps",
+ env={"GOOGLE_MAPS_API_KEY": maps_api_key})
+ )
+
+ maps_agent.start("Find nearby restaurants in London")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python google_maps_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - Google Maps API key
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/memory.mdx b/docs/mcp/memory.mdx
new file mode 100644
index 000000000..e22832c87
--- /dev/null
+++ b/docs/mcp/memory.mdx
@@ -0,0 +1,75 @@
+---
+title: "Memory MCP Integration"
+sidebarTitle: "Memory"
+description: "Guide for integrating memory storage capabilities with PraisonAI agents using MCP"
+icon: "memory"
+---
+
+## Add Memory Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Memory MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#4B0082,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your OpenAI API key as an environment variable in your terminal:
+ ```bash
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `memory_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Get the memory file path from environment
+ memory_file_path = os.getenv("MEMORY_FILE_PATH", "/path/to/custom/memory.json")
+
+ # Use a single string command with Memory configuration
+ memory_agent = Agent(
+ instructions="""You are a helpful assistant that can store and retrieve information.
+ Use the available tools when relevant to manage memory operations.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-memory",
+ env={"MEMORY_FILE_PATH": memory_file_path})
+ )
+
+ memory_agent.start("Store this conversation in memory")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python memory_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - OpenAI API key (for the agent's LLM)
+ - Write access to the specified memory file path
+
diff --git a/docs/mcp/postgres.mdx b/docs/mcp/postgres.mdx
new file mode 100644
index 000000000..65c5a91a6
--- /dev/null
+++ b/docs/mcp/postgres.mdx
@@ -0,0 +1,71 @@
+---
+title: "PostgreSQL MCP Integration"
+sidebarTitle: "PostgreSQL"
+description: "Guide for integrating PostgreSQL database operations with PraisonAI agents using MCP"
+icon: "database"
+---
+
+## Add PostgreSQL Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[PostgreSQL MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#336791,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Ensure you have PostgreSQL running locally or specify your PostgreSQL connection URL.
+
+
+
+ Create a new file `postgres_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # PostgreSQL connection string
+ postgres_url = "postgresql://localhost/mydb"
+
+ # Use a single string command with PostgreSQL configuration
+ postgres_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with PostgreSQL databases.
+ Use the available tools when relevant to manage database operations.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-postgres", args=[postgres_url])
+ )
+
+ postgres_agent.start("List all tables in the database")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python postgres_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - PostgreSQL server running locally or remotely
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/puppeteer.mdx b/docs/mcp/puppeteer.mdx
new file mode 100644
index 000000000..db4f6809d
--- /dev/null
+++ b/docs/mcp/puppeteer.mdx
@@ -0,0 +1,71 @@
+---
+title: "Puppeteer MCP Integration"
+sidebarTitle: "Puppeteer"
+description: "Guide for integrating web automation capabilities with PraisonAI agents using Puppeteer MCP"
+icon: "chrome"
+---
+
+## Add Puppeteer Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Puppeteer MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#00B4FF,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your OpenAI API key as an environment variable in your terminal:
+ ```bash
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `puppeteer_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Use a single string command with Puppeteer configuration
+ puppeteer_agent = Agent(
+ instructions="""You are a helpful assistant that can automate web browser interactions.
+ Use the available tools when relevant to perform web automation tasks.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-puppeteer")
+ )
+
+ puppeteer_agent.start("Navigate to example.com and take a screenshot")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python puppeteer_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - OpenAI API key (for the agent's LLM)
+ - Chrome or Chromium browser installed on your system
+
diff --git a/docs/mcp/redis.mdx b/docs/mcp/redis.mdx
new file mode 100644
index 000000000..fa9a9c3ad
--- /dev/null
+++ b/docs/mcp/redis.mdx
@@ -0,0 +1,71 @@
+---
+title: "Redis MCP Integration"
+sidebarTitle: "Redis"
+description: "Guide for integrating Redis database operations with PraisonAI agents using MCP"
+icon: "database"
+---
+
+## Add Redis Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Redis MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#DC382D,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, as the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Ensure you have Redis running locally or specify your Redis connection URL.
+
+
+
+ Create a new file `redis_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Redis connection string
+ redis_url = "redis://localhost:6379"
+
+ # Use a single string command with Redis configuration
+ redis_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Redis.
+ Use the available tools when relevant to manage Redis operations.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-redis", args=[redis_url])
+ )
+
+ redis_agent.start("Set a key-value pair in Redis")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python redis_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - Redis server running locally or remotely
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/sequential-thinking.mdx b/docs/mcp/sequential-thinking.mdx
new file mode 100644
index 000000000..3c1dd8485
--- /dev/null
+++ b/docs/mcp/sequential-thinking.mdx
@@ -0,0 +1,70 @@
+---
+title: "Sequential Thinking MCP Integration"
+sidebarTitle: "Sequential Thinking"
+description: "Guide for integrating sequential thinking capabilities with PraisonAI agents using MCP"
+icon: "brain"
+---
+
+## Add Sequential Thinking Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Sequential Thinking MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#6A5ACD,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, since the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your OpenAI API key as an environment variable in your terminal:
+ ```bash
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `sequential_thinking.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Use a single string command with Sequential Thinking configuration
+ sequential_agent = Agent(
+ instructions="""You are a helpful assistant that can break down complex problems.
+ Use the available tools when relevant to perform step-by-step analysis.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-sequential-thinking")
+ )
+
+ sequential_agent.start("Break down the process of making a cup of tea")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python sequential_thinking.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/slack.mdx b/docs/mcp/slack.mdx
new file mode 100644
index 000000000..619bcf632
--- /dev/null
+++ b/docs/mcp/slack.mdx
@@ -0,0 +1,81 @@
+---
+title: "Slack MCP Integration"
+sidebarTitle: "Slack"
+description: "Guide for integrating Slack messaging capabilities with PraisonAI agents using MCP"
+icon: "slack"
+---
+
+## Add Slack Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Slack MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#4A154B,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Install the required Python packages (Node.js must also be installed, since the MCP server runs on it):
+ ```bash
+ pip install praisonaiagents mcp
+ ```
+
+
+ Set your Slack credentials as environment variables in your terminal:
+ ```bash
+ export SLACK_BOT_TOKEN=your_slack_bot_token_here
+ export SLACK_TEAM_ID=your_slack_team_id_here
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `slack_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Get Slack credentials from environment
+ slack_token = os.getenv("SLACK_BOT_TOKEN")
+ slack_team_id = os.getenv("SLACK_TEAM_ID")
+
+ # Use a single string command with Slack configuration
+ slack_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Slack.
+ Use the available tools when relevant to manage Slack communications.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-slack",
+ env={
+ "SLACK_BOT_TOKEN": slack_token,
+ "SLACK_TEAM_ID": slack_team_id
+ })
+ )
+
+ slack_agent.start("Send a message to the general channel")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ python slack_agent.py
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - Node.js installed on your system
+ - Slack Bot Token and Team ID
+ - OpenAI API key (for the agent's LLM)
+
diff --git a/docs/mcp/stockprice.mdx b/docs/mcp/stockprice.mdx
new file mode 100644
index 000000000..f9a2673f5
--- /dev/null
+++ b/docs/mcp/stockprice.mdx
@@ -0,0 +1,153 @@
+---
+title: "Stock Price MCP Integration"
+sidebarTitle: "Stock Price"
+description: "Guide for integrating stock price retrieval capabilities with PraisonAI agents using MCP"
+icon: "chart-line"
+---
+
+## Add Stock Price Tool to AI Agent
+
+```mermaid
+flowchart LR
+ In[Query] --> Agent[AI Agent]
+ Agent --> Tool[Stock Price MCP]
+ Tool --> Agent
+ Agent --> Out[Answer]
+
+ style In fill:#8B0000,color:#fff
+ style Agent fill:#2E8B57,color:#fff
+ style Tool fill:#2E8B57,color:#fff
+ style Out fill:#8B0000,color:#fff
+```
+
+## Quick Start
+
+
+
+ Create a conda environment and install the required packages:
+ ```bash
+ zsh -c "source $(conda info --base)/etc/profile.d/conda.sh && conda create -n windsurf python=3.10 -y"
+ zsh -c "source $(conda info --base)/etc/profile.d/conda.sh && conda activate windsurf && pip install praisonaiagents mcp yfinance"
+ ```
+
+
+ Set your OpenAI API key as an environment variable in your terminal:
+ ```bash
+ export OPENAI_API_KEY=your_openai_api_key_here
+ ```
+
+
+
+ Create a new file `stock_price_server.py` with the following code:
+ ```python
+ import yfinance as yf
+ from mcp.server.fastmcp import FastMCP
+
+ mcp = FastMCP("stock_prices")
+
+ @mcp.tool()
+ async def get_stock_price(ticker: str) -> str:
+ """Get the current stock price for a given ticker symbol.
+
+ Args:
+ ticker: Stock ticker symbol (e.g., AAPL, MSFT, GOOG)
+
+ Returns:
+ Current stock price as a string
+ """
+ if not ticker:
+ return "No ticker provided"
+ try:
+ stock = yf.Ticker(ticker)
+ info = stock.info
+ current_price = info.get('currentPrice') or info.get('regularMarketPrice')
+ if not current_price:
+ return f"Could not retrieve price for {ticker}"
+ return f"${current_price:.2f}"
+
+ except Exception as e:
+ return f"Error: {str(e)}"
+
+ if __name__ == "__main__":
+ mcp.run(transport='stdio')
+ ```
+
+
+
+ Create a new file `stock_price_agent.py` with the following code:
+ ```python
+ from praisonaiagents import Agent, MCP
+ import os
+
+ # Get the path to your Python interpreter and the server file
+ python_path = os.getenv("PYTHON_PATH", "/path/to/your/python")
+ server_path = os.getenv("SERVER_PATH", "/path/to/your/stock_price_server.py")
+
+ # Create the agent with the stock price MCP tool
+ agent = Agent(
+ instructions="""You are a helpful assistant that can check stock prices.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools=MCP(f"{python_path} {server_path}")
+ )
+
+ agent.start("What is the stock price of Tesla?")
+ ```
+
+
+
+ Execute your script:
+ ```bash
+ zsh -c "source $(conda info --base)/etc/profile.d/conda.sh && conda activate windsurf && python stock_price_agent.py"
+ ```
+
+
+
+
+ **Requirements**
+ - Python 3.10 or higher
+ - yfinance package
+ - mcp package (Model Context Protocol Python SDK)
+ - praisonaiagents package
+ - OpenAI API key (for the agent's LLM)
+
+
+## Gradio UI Example
+
+You can also create a simple web UI for your stock price agent using Gradio:
+
+```python
+from praisonaiagents import Agent, MCP
+import gradio as gr
+import os
+
+# Get the path to your Python interpreter and the server file
+python_path = os.getenv("PYTHON_PATH", "/path/to/your/python")
+server_path = os.getenv("SERVER_PATH", "/path/to/your/stock_price_server.py")
+
+# Create the agent with the stock price MCP tool
+agent = Agent(
+ instructions="""You are a helpful assistant that can check stock prices.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools=MCP(f"{python_path} {server_path}")
+)
+
+def chat(message, history):
+ return agent.chat(message)
+
+demo = gr.ChatInterface(
+ chat,
+ title="Stock Price Assistant",
+ description="Ask about any stock price and get real-time information",
+ theme="soft"
+)
+
+if __name__ == "__main__":
+ demo.launch()
+```
+
+Install Gradio with:
+```bash
+zsh -c "source $(conda info --base)/etc/profile.d/conda.sh && conda activate windsurf && pip install gradio"
+```
diff --git a/docs/mint.json b/docs/mint.json
index 2c3b275bb..22416df21 100644
--- a/docs/mint.json
+++ b/docs/mint.json
@@ -237,9 +237,25 @@
     {
      "group": "MCP",
      "pages": [
-        "mcp/bravesearch",
         "mcp/airbnb",
-        "mcp/custom"
+        "mcp/aws-kb-retrieval",
+        "mcp/bravesearch",
+        "mcp/custom",
+        "mcp/sequential-thinking",
+        "mcp/filesystem",
+        "mcp/github",
+        "mcp/gdrive",
+        "mcp/gitlab",
+        "mcp/google-maps",
+        "mcp/memory",
+        "mcp/redis",
+        "mcp/stockprice",
+        "mcp/postgres",
+        "mcp/puppeteer",
+        "mcp/everart",
+        "mcp/slack",
+        "mcp/custom-python-client",
+        "mcp/custom-python-server"
]
},
{
diff --git a/examples/mcp/airbnb-mcp.py b/examples/mcp/airbnb-mcp.py
new file mode 100644
index 000000000..dabe2d4c7
--- /dev/null
+++ b/examples/mcp/airbnb-mcp.py
@@ -0,0 +1,9 @@
+from praisonaiagents import Agent, MCP
+
+search_agent = Agent(
+ instructions="""You help book apartments on Airbnb.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @openbnb/mcp-server-airbnb --ignore-robots-txt")
+)
+
+search_agent.start("I want to book an apartment in Paris for 2 nights. 03/28 - 03/30 for 2 adults")
\ No newline at end of file
diff --git a/examples/mcp/aws-kb-retrieval-mcp.py b/examples/mcp/aws-kb-retrieval-mcp.py
new file mode 100644
index 000000000..7921c0376
--- /dev/null
+++ b/examples/mcp/aws-kb-retrieval-mcp.py
@@ -0,0 +1,22 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Get AWS credentials from environment
+aws_access_key = os.getenv("AWS_ACCESS_KEY_ID")
+aws_secret_key = os.getenv("AWS_SECRET_ACCESS_KEY")
+aws_region = os.getenv("AWS_REGION")
+
+# Use a single string command with AWS KB Retrieval configuration
+aws_kb_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with AWS Knowledge Base.
+ Use the available tools when relevant to retrieve and process AWS information.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-aws-kb-retrieval",
+ env={
+ "AWS_ACCESS_KEY_ID": aws_access_key,
+ "AWS_SECRET_ACCESS_KEY": aws_secret_key,
+ "AWS_REGION": aws_region
+ })
+)
+
+aws_kb_agent.start("Search AWS documentation about EC2 instances")
\ No newline at end of file
diff --git a/examples/mcp/bravesearch-mcp.py b/examples/mcp/bravesearch-mcp.py
new file mode 100644
index 000000000..8a47dab94
--- /dev/null
+++ b/examples/mcp/bravesearch-mcp.py
@@ -0,0 +1,15 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Use the API key from environment or set it directly
+brave_api_key = os.getenv("BRAVE_API_KEY")
+
+# Use a single string command with environment variables
+search_agent = Agent(
+ instructions="""You are a helpful assistant that can search the web for information.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-brave-search", env={"BRAVE_API_KEY": brave_api_key})
+)
+
+search_agent.start("Search more information about AI News")
\ No newline at end of file
diff --git a/examples/mcp/custom-python-client.py b/examples/mcp/custom-python-client.py
new file mode 100644
index 000000000..a8f57e413
--- /dev/null
+++ b/examples/mcp/custom-python-client.py
@@ -0,0 +1,13 @@
+from praisonaiagents import Agent, MCP
+
+agent = Agent(
+ instructions="""You are a helpful assistant that can check stock prices and perform other tasks.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools = MCP("/Users/praison/miniconda3/envs/mcp/bin/python /Users/praison/stockprice/app.py")
+)
+
+# NOTE: Python Path replace with yours: /Users/praison/miniconda3/envs/mcp/bin/python
+# NOTE: app.py file path, replace it with yours: /Users/praison/stockprice/app.py
+
+agent.start("What is the stock price of Tesla?")
\ No newline at end of file
diff --git a/examples/mcp/custom-python-server.py b/examples/mcp/custom-python-server.py
new file mode 100644
index 000000000..d40af7165
--- /dev/null
+++ b/examples/mcp/custom-python-server.py
@@ -0,0 +1,30 @@
+import yfinance as yf
+from mcp.server.fastmcp import FastMCP
+
+mcp = FastMCP("stock_prices")
+
+@mcp.tool()
+async def get_stock_price(ticker: str) -> str:
+ """Get the current stock price for a given ticker symbol.
+
+ Args:
+ ticker: Stock ticker symbol (e.g., AAPL, MSFT, GOOG)
+
+ Returns:
+ Current stock price as a string
+ """
+ if not ticker:
+ return "No ticker provided"
+ try:
+ stock = yf.Ticker(ticker)
+ info = stock.info
+ current_price = info.get('currentPrice') or info.get('regularMarketPrice')
+ if not current_price:
+ return f"Could not retrieve price for {ticker}"
+ return f"${current_price:.2f}"
+
+ except Exception as e:
+ return f"Error: {str(e)}"
+
+if __name__ == "__main__":
+ mcp.run(transport='stdio')
\ No newline at end of file
diff --git a/examples/mcp/everart-mcp.py b/examples/mcp/everart-mcp.py
new file mode 100644
index 000000000..11a6ab13b
--- /dev/null
+++ b/examples/mcp/everart-mcp.py
@@ -0,0 +1,16 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Get Everart API key from environment
+everart_api_key = os.getenv("EVERART_API_KEY")
+
+# Use a single string command with Everart configuration
+everart_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Everart.
+ Use the available tools when relevant to generate and manage art.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-everart",
+ env={"EVERART_API_KEY": everart_api_key})
+)
+
+everart_agent.start("Generate an artistic image of a sunset")
\ No newline at end of file
diff --git a/examples/mcp/filesystem-mcp.py b/examples/mcp/filesystem-mcp.py
new file mode 100644
index 000000000..f506967a6
--- /dev/null
+++ b/examples/mcp/filesystem-mcp.py
@@ -0,0 +1,18 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Define allowed directories for filesystem access
+allowed_dirs = [
+ "/Users/username/Desktop",
+ "/path/to/other/allowed/dir"
+]
+
+# Use a single string command with allowed directories
+filesystem_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with the filesystem.
+ Use the available tools when relevant to manage files and directories.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-filesystem", args=allowed_dirs)
+)
+
+filesystem_agent.start("List files in the allowed directories")
\ No newline at end of file
diff --git a/examples/mcp/gdrive-mcp.py b/examples/mcp/gdrive-mcp.py
new file mode 100644
index 000000000..e9f3f2bf8
--- /dev/null
+++ b/examples/mcp/gdrive-mcp.py
@@ -0,0 +1,16 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Get the credentials path from environment
+gdrive_credentials = os.getenv("GDRIVE_CREDENTIALS_PATH", "servers/gcp-oauth.keys.json")
+
+# Use a single string command with Google Drive configuration
+gdrive_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Google Drive.
+ Use the available tools when relevant to manage files and folders.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-gdrive",
+ env={"GDRIVE_CREDENTIALS_PATH": gdrive_credentials})
+)
+
+gdrive_agent.start("List files in my Google Drive")
\ No newline at end of file
diff --git a/examples/mcp/github-mcp.py b/examples/mcp/github-mcp.py
new file mode 100644
index 000000000..68e1621b7
--- /dev/null
+++ b/examples/mcp/github-mcp.py
@@ -0,0 +1,15 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Use the API key from environment or set it directly
+github_token = os.getenv("GITHUB_PERSONAL_ACCESS_TOKEN")
+
+# Use a single string command with environment variables
+github_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with GitHub.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-github", env={"GITHUB_PERSONAL_ACCESS_TOKEN": github_token})
+)
+
+github_agent.start("List my GitHub repositories")
\ No newline at end of file
diff --git a/examples/mcp/gitlab-mcp.py b/examples/mcp/gitlab-mcp.py
new file mode 100644
index 000000000..b23aa5651
--- /dev/null
+++ b/examples/mcp/gitlab-mcp.py
@@ -0,0 +1,20 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Use the API token and URL from environment or set directly
+gitlab_token = os.getenv("GITLAB_PERSONAL_ACCESS_TOKEN")
+gitlab_api_url = os.getenv("GITLAB_API_URL", "https://gitlab.com/api/v4")
+
+# Use a single string command with environment variables
+gitlab_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with GitLab.
+ Use the available tools when relevant to answer user questions.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-gitlab",
+ env={
+ "GITLAB_PERSONAL_ACCESS_TOKEN": gitlab_token,
+ "GITLAB_API_URL": gitlab_api_url
+ })
+)
+
+gitlab_agent.start("List my GitLab projects")
\ No newline at end of file
diff --git a/examples/mcp/google-maps-mcp.py b/examples/mcp/google-maps-mcp.py
new file mode 100644
index 000000000..bb4b879ea
--- /dev/null
+++ b/examples/mcp/google-maps-mcp.py
@@ -0,0 +1,16 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Get the API key from environment
+maps_api_key = os.getenv("GOOGLE_MAPS_API_KEY")
+
+# Use a single string command with Google Maps configuration
+maps_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Google Maps.
+ Use the available tools when relevant to handle location-based queries.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-google-maps",
+ env={"GOOGLE_MAPS_API_KEY": maps_api_key})
+)
+
+maps_agent.start("Find nearby restaurants in London")
\ No newline at end of file
diff --git a/examples/mcp/memory-mcp.py b/examples/mcp/memory-mcp.py
new file mode 100644
index 000000000..a669d5f67
--- /dev/null
+++ b/examples/mcp/memory-mcp.py
@@ -0,0 +1,16 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Get the memory file path from environment
+memory_file_path = os.getenv("MEMORY_FILE_PATH", "/path/to/custom/memory.json")
+
+# Use a single string command with Memory configuration
+memory_agent = Agent(
+ instructions="""You are a helpful assistant that can store and retrieve information.
+ Use the available tools when relevant to manage memory operations.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-memory",
+ env={"MEMORY_FILE_PATH": memory_file_path})
+)
+
+memory_agent.start("Store this conversation in memory")
\ No newline at end of file
diff --git a/examples/mcp/postgres-mcp.py b/examples/mcp/postgres-mcp.py
new file mode 100644
index 000000000..e069a4159
--- /dev/null
+++ b/examples/mcp/postgres-mcp.py
@@ -0,0 +1,15 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# PostgreSQL connection string
+postgres_url = "postgresql://localhost/mydb"
+
+# Use a single string command with PostgreSQL configuration
+postgres_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with PostgreSQL databases.
+ Use the available tools when relevant to manage database operations.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-postgres", args=[postgres_url])
+)
+
+postgres_agent.start("List all tables in the database")
\ No newline at end of file
diff --git a/examples/mcp/puppeteer-mcp.py b/examples/mcp/puppeteer-mcp.py
new file mode 100644
index 000000000..933b444c5
--- /dev/null
+++ b/examples/mcp/puppeteer-mcp.py
@@ -0,0 +1,12 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Use a single string command with Puppeteer configuration
+puppeteer_agent = Agent(
+ instructions="""You are a helpful assistant that can automate web browser interactions.
+ Use the available tools when relevant to perform web automation tasks.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-puppeteer")
+)
+
+puppeteer_agent.start("Navigate to example.com and take a screenshot")
\ No newline at end of file
diff --git a/examples/mcp/redis-mcp.py b/examples/mcp/redis-mcp.py
new file mode 100644
index 000000000..d9f3d32aa
--- /dev/null
+++ b/examples/mcp/redis-mcp.py
@@ -0,0 +1,15 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Redis connection string
+redis_url = "redis://localhost:6379"
+
+# Use a single string command with Redis configuration
+redis_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Redis.
+ Use the available tools when relevant to manage Redis operations.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-redis", args=[redis_url])
+)
+
+redis_agent.start("Set a key-value pair in Redis")
\ No newline at end of file
diff --git a/examples/mcp/sequential-thinking-mcp.py b/examples/mcp/sequential-thinking-mcp.py
new file mode 100644
index 000000000..c0f63ed6c
--- /dev/null
+++ b/examples/mcp/sequential-thinking-mcp.py
@@ -0,0 +1,12 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Use a single string command with Sequential Thinking configuration
+sequential_agent = Agent(
+ instructions="""You are a helpful assistant that can break down complex problems.
+ Use the available tools when relevant to perform step-by-step analysis.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-sequential-thinking")
+)
+
+sequential_agent.start("Break down the process of making a cup of tea")
\ No newline at end of file
diff --git a/examples/mcp/slack-mcp.py b/examples/mcp/slack-mcp.py
new file mode 100644
index 000000000..82a8d6263
--- /dev/null
+++ b/examples/mcp/slack-mcp.py
@@ -0,0 +1,20 @@
+from praisonaiagents import Agent, MCP
+import os
+
+# Get Slack credentials from environment
+slack_token = os.getenv("SLACK_BOT_TOKEN")
+slack_team_id = os.getenv("SLACK_TEAM_ID")
+
+# Use a single string command with Slack configuration
+slack_agent = Agent(
+ instructions="""You are a helpful assistant that can interact with Slack.
+ Use the available tools when relevant to manage Slack communications.""",
+ llm="gpt-4o-mini",
+ tools=MCP("npx -y @modelcontextprotocol/server-slack",
+ env={
+ "SLACK_BOT_TOKEN": slack_token,
+ "SLACK_TEAM_ID": slack_team_id
+ })
+)
+
+slack_agent.start("Send a message to the general channel")
\ No newline at end of file
diff --git a/src/praisonai-agents/mcp-mini-bravesearch.py b/src/praisonai-agents/mcp-mini-bravesearch.py
index 30c9575f0..8a47dab94 100644
--- a/src/praisonai-agents/mcp-mini-bravesearch.py
+++ b/src/praisonai-agents/mcp-mini-bravesearch.py
@@ -2,7 +2,7 @@
import os
# Use the API key from environment or set it directly
-brave_api_key = os.getenv("BRAVE_API_KEY") or "BSAbRwmwE-WV_7gKR1ZZIdE2Twa0l4w"
+brave_api_key = os.getenv("BRAVE_API_KEY")
# Use a single string command with environment variables
search_agent = Agent(