diff --git a/docker/Dockerfile b/docker/Dockerfile
index a620445da..5c2de4279 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
# Install Python packages (using latest versions)
RUN pip install --no-cache-dir \
flask \
- "praisonai>=2.2.20" \
+ "praisonai>=2.2.21" \
"praisonai[api]" \
gunicorn \
markdown
diff --git a/docker/Dockerfile.chat b/docker/Dockerfile.chat
index 464eace1f..37a5c2eff 100644
--- a/docker/Dockerfile.chat
+++ b/docker/Dockerfile.chat
@@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
# Install Python packages (using latest versions)
RUN pip install --no-cache-dir \
praisonai_tools \
- "praisonai>=2.2.20" \
+ "praisonai>=2.2.21" \
"praisonai[chat]" \
"embedchain[github,youtube]"
diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev
index 1a6e2e1e2..9714a63c9 100644
--- a/docker/Dockerfile.dev
+++ b/docker/Dockerfile.dev
@@ -20,7 +20,7 @@ RUN mkdir -p /root/.praison
# Install Python packages (using latest versions)
RUN pip install --no-cache-dir \
praisonai_tools \
- "praisonai>=2.2.20" \
+ "praisonai>=2.2.21" \
"praisonai[ui]" \
"praisonai[chat]" \
"praisonai[realtime]" \
diff --git a/docker/Dockerfile.ui b/docker/Dockerfile.ui
index 3ed865e22..9b2cf4052 100644
--- a/docker/Dockerfile.ui
+++ b/docker/Dockerfile.ui
@@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
# Install Python packages (using latest versions)
RUN pip install --no-cache-dir \
praisonai_tools \
- "praisonai>=2.2.20" \
+ "praisonai>=2.2.21" \
"praisonai[ui]" \
"praisonai[crewai]"
diff --git a/docker/README.md b/docker/README.md
index 3e2326eac..84d33e76c 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -121,7 +121,7 @@ healthcheck:
## π¦ Package Versions
All Docker images use consistent, up-to-date versions:
-- PraisonAI: `>=2.2.20`
+- PraisonAI: `>=2.2.21`
- PraisonAI Agents: `>=0.0.92`
- Python: `3.11-slim`
@@ -218,7 +218,7 @@ docker-compose up -d
### Version Pinning
To use specific versions, update the Dockerfile:
```dockerfile
-RUN pip install "praisonai==2.2.20" "praisonaiagents==0.0.92"
+RUN pip install "praisonai==2.2.21" "praisonaiagents==0.0.92"
```
## π Production Deployment
diff --git a/docs/api/praisonai/deploy.html b/docs/api/praisonai/deploy.html
index 21eef6859..e5d2b7269 100644
--- a/docs/api/praisonai/deploy.html
+++ b/docs/api/praisonai/deploy.html
@@ -110,7 +110,7 @@
Raises
file.write("FROM python:3.11-slim\n")
file.write("WORKDIR /app\n")
file.write("COPY . .\n")
- file.write("RUN pip install flask praisonai==2.2.20 gunicorn markdown\n")
+ file.write("RUN pip install flask praisonai==2.2.21 gunicorn markdown\n")
file.write("EXPOSE 8080\n")
file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
diff --git a/docs/developers/local-development.mdx b/docs/developers/local-development.mdx
index 82f8ac7cd..ff67702e7 100644
--- a/docs/developers/local-development.mdx
+++ b/docs/developers/local-development.mdx
@@ -27,7 +27,7 @@ WORKDIR /app
COPY . .
-RUN pip install flask praisonai==2.2.20 watchdog
+RUN pip install flask praisonai==2.2.21 watchdog
EXPOSE 5555
diff --git a/docs/tools/crawl4ai.mdx b/docs/tools/crawl4ai.mdx
index 3ba8b8084..9521e1801 100644
--- a/docs/tools/crawl4ai.mdx
+++ b/docs/tools/crawl4ai.mdx
@@ -31,7 +31,7 @@ export OPENAI_API_KEY=xxxxxxxxx
```python
import os
-from crawl4ai import WebCrawler
+from crawl4ai import AsyncWebCrawler
from crawl4ai.extraction_strategy import LLMExtractionStrategy
from pydantic import BaseModel, Field
@@ -41,7 +41,7 @@ class OpenAIModelFee(BaseModel):
output_fee: str = Field(..., description="Fee for output token Γfor the OpenAI model.")
url = 'https://openai.com/api/pricing/'
-crawler = WebCrawler()
+crawler = AsyncWebCrawler()
crawler.warmup()
result = crawler.run(
diff --git a/docs/tools/spider_tools.mdx b/docs/tools/spider_tools.mdx
index 749192de9..fb9f347ec 100644
--- a/docs/tools/spider_tools.mdx
+++ b/docs/tools/spider_tools.mdx
@@ -120,7 +120,7 @@ from praisonaiagents.tools import scrape_page, extract_links, crawl, extract_tex
# Create search agent
agent = Agent(
- name="WebCrawler",
+    name="WebCrawler",
role="Web Scraping Specialist",
goal="Extract and analyze web content efficiently.",
backstory="Expert in web scraping and content extraction.",
diff --git a/docs/ui/chat.mdx b/docs/ui/chat.mdx
index 8fe7725e2..8496f6284 100644
--- a/docs/ui/chat.mdx
+++ b/docs/ui/chat.mdx
@@ -155,7 +155,7 @@ To facilitate local development with live reload, you can use Docker. Follow the
COPY . .
- RUN pip install flask praisonai==2.2.20 watchdog
+ RUN pip install flask praisonai==2.2.21 watchdog
EXPOSE 5555
diff --git a/docs/ui/code.mdx b/docs/ui/code.mdx
index 890eb66b8..4ca4a7181 100644
--- a/docs/ui/code.mdx
+++ b/docs/ui/code.mdx
@@ -208,7 +208,7 @@ To facilitate local development with live reload, you can use Docker. Follow the
COPY . .
- RUN pip install flask praisonai==2.2.20 watchdog
+ RUN pip install flask praisonai==2.2.21 watchdog
EXPOSE 5555
diff --git a/examples/cookbooks/yaml/model_fee_retreival_agents.ipynb b/examples/cookbooks/yaml/model_fee_retreival_agents.ipynb
index 53cc8902a..1f6982676 100644
--- a/examples/cookbooks/yaml/model_fee_retreival_agents.ipynb
+++ b/examples/cookbooks/yaml/model_fee_retreival_agents.ipynb
@@ -51,7 +51,7 @@
},
"outputs": [],
"source": [
- "from crawl4ai import WebCrawler\n",
+ "from crawl4ai import AsyncWebCrawler\n",
"from crawl4ai.extraction_strategy import LLMExtractionStrategy\n",
"from pydantic import BaseModel, Field\n",
"from praisonai_tools import BaseTool\n",
@@ -66,7 +66,7 @@
" description: str = \"Extracts model fees for input and output tokens from the given pricing page.\"\n",
"\n",
" def _run(self, url: str):\n",
- " crawler = WebCrawler()\n",
+ " crawler = AsyncWebCrawler()\n",
" crawler.warmup()\n",
"\n",
" result = crawler.run(\n",
@@ -196,8 +196,8 @@
"output_type": "stream",
"text": [
"[LOG] π Initializing LocalSeleniumCrawlerStrategy\n",
- "[LOG] π€οΈ Warming up the WebCrawler\n",
- "[LOG] π WebCrawler is ready to crawl\n",
+ "[LOG] π€οΈ Warming up the AsyncWebCrawler\n",
+ "[LOG] π AsyncWebCrawler is ready to crawl\n",
"[LOG] π Crawling done for https://openai.com/api/pricing/, success: True, time taken: 0.86 seconds\n",
"[LOG] π Content extracted for https://openai.com/api/pricing/, success: True, time taken: 0.01 seconds\n",
"[LOG] π₯ Extracting semantic blocks for https://openai.com/api/pricing/, Strategy: LLMExtractionStrategy\n",
@@ -245,8 +245,8 @@
"output_type": "stream",
"text": [
"[LOG] π Initializing LocalSeleniumCrawlerStrategy\n",
- "[LOG] π€οΈ Warming up the WebCrawler\n",
- "[LOG] π WebCrawler is ready to crawl\n",
+ "[LOG] π€οΈ Warming up the AsyncWebCrawler\n",
+ "[LOG] π AsyncWebCrawler is ready to crawl\n",
"[LOG] π Crawling done for https://www.anthropic.com/pricing, success: True, time taken: 2.93 seconds\n",
"[LOG] π Content extracted for https://www.anthropic.com/pricing, success: True, time taken: 0.06 seconds\n",
"[LOG] π₯ Extracting semantic blocks for https://www.anthropic.com/pricing, Strategy: LLMExtractionStrategy\n",
@@ -323,8 +323,8 @@
"output_type": "stream",
"text": [
"[LOG] π Initializing LocalSeleniumCrawlerStrategy\n",
- "[LOG] π€οΈ Warming up the WebCrawler\n",
- "[LOG] π WebCrawler is ready to crawl\n",
+ "[LOG] π€οΈ Warming up the AsyncWebCrawler\n",
+ "[LOG] π AsyncWebCrawler is ready to crawl\n",
"[LOG] π Crawling done for https://cohere.com/pricing, success: True, time taken: 6.34 seconds\n",
"[LOG] π Content extracted for https://cohere.com/pricing, success: True, time taken: 0.14 seconds\n",
"[LOG] π₯ Extracting semantic blocks for https://cohere.com/pricing, Strategy: LLMExtractionStrategy\n",
diff --git a/src/praisonai/praisonai.rb b/src/praisonai/praisonai.rb
index cfecb5cbb..12b7861ae 100644
--- a/src/praisonai/praisonai.rb
+++ b/src/praisonai/praisonai.rb
@@ -3,8 +3,8 @@ class Praisonai < Formula
desc "AI tools for various AI applications"
homepage "https://github.com/MervinPraison/PraisonAI"
- url "https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.2.20.tar.gz"
- sha256 `curl -sL https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.2.20.tar.gz | shasum -a 256`.split.first
+ url "https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.2.21.tar.gz"
+ sha256 `curl -sL https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.2.21.tar.gz | shasum -a 256`.split.first
license "MIT"
depends_on "python@3.11"
diff --git a/src/praisonai/praisonai/deploy.py b/src/praisonai/praisonai/deploy.py
index 4f7148e73..c5abe93b5 100644
--- a/src/praisonai/praisonai/deploy.py
+++ b/src/praisonai/praisonai/deploy.py
@@ -56,7 +56,7 @@ def create_dockerfile(self):
file.write("FROM python:3.11-slim\n")
file.write("WORKDIR /app\n")
file.write("COPY . .\n")
- file.write("RUN pip install flask praisonai==2.2.20 gunicorn markdown\n")
+ file.write("RUN pip install flask praisonai==2.2.21 gunicorn markdown\n")
file.write("EXPOSE 8080\n")
file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
diff --git a/src/praisonai/praisonai/ui/chat.py b/src/praisonai/praisonai/ui/chat.py
index 5177e7564..fc5ebd77a 100644
--- a/src/praisonai/praisonai/ui/chat.py
+++ b/src/praisonai/praisonai/ui/chat.py
@@ -12,7 +12,7 @@
from dotenv import load_dotenv
from PIL import Image
from tavily import TavilyClient
-from crawl4ai import AsyncWebCrawler
+from crawl4ai import AsyncWebCrawler
# Local application/library imports
import chainlit as cl
@@ -72,7 +72,7 @@ async def tavily_web_search(query):
response = tavily_client.search(query)
logger.debug(f"Tavily search response: {response}")
- async with AsyncWebCrawler() as crawler:
+    async with AsyncWebCrawler() as crawler:
results = []
for result in response.get('results', []):
url = result.get('url')
diff --git a/src/praisonai/praisonai/ui/code.py b/src/praisonai/praisonai/ui/code.py
index bc26428c5..af085093c 100644
--- a/src/praisonai/praisonai/ui/code.py
+++ b/src/praisonai/praisonai/ui/code.py
@@ -12,7 +12,7 @@
from PIL import Image
from context import ContextGatherer
from tavily import TavilyClient
-from crawl4ai import AsyncWebCrawler
+from crawl4ai import AsyncWebCrawler
# Local application/library imports
import chainlit as cl
@@ -153,8 +153,8 @@ async def tavily_web_search(query):
response = tavily_client.search(query)
logger.debug(f"Tavily search response: {response}")
- # Create an instance of AsyncWebCrawler
- async with AsyncWebCrawler() as crawler:
+    # Create an instance of AsyncWebCrawler
+    async with AsyncWebCrawler() as crawler:
# Prepare the results
results = []
for result in response.get('results', []):
diff --git a/src/praisonai/praisonai/ui/components/aicoder.py b/src/praisonai/praisonai/ui/components/aicoder.py
index 28632a1cd..8797e0e0f 100644
--- a/src/praisonai/praisonai/ui/components/aicoder.py
+++ b/src/praisonai/praisonai/ui/components/aicoder.py
@@ -7,7 +7,7 @@
import json
import dotenv
from tavily import TavilyClient
-from crawl4ai import AsyncWebCrawler
+from crawl4ai import AsyncWebCrawler
dotenv.load_dotenv()
@@ -144,7 +144,7 @@ async def tavily_web_search(self, query):
})
response = self.tavily_client.search(query)
results = []
- async with AsyncWebCrawler() as crawler:
+        async with AsyncWebCrawler() as crawler:
for result in response.get('results', []):
url = result.get('url')
if url:
diff --git a/src/praisonai/praisonai/ui/realtimeclient/tools.py b/src/praisonai/praisonai/ui/realtimeclient/tools.py
index a870305a4..b12a5e82f 100644
--- a/src/praisonai/praisonai/ui/realtimeclient/tools.py
+++ b/src/praisonai/praisonai/ui/realtimeclient/tools.py
@@ -3,7 +3,7 @@
import plotly
import json
from tavily import TavilyClient
-from crawl4ai import WebCrawler
+from crawl4ai import AsyncWebCrawler
import os
import logging
import asyncio
@@ -122,7 +122,7 @@ async def tavily_web_search_handler(query):
})
def process_tavily_results(response):
- crawler = WebCrawler()
+ crawler = AsyncWebCrawler()
crawler.warmup()
results = []
for result in response.get('results', []):
@@ -151,7 +151,7 @@ async def fallback_to_duckduckgo(query):
logger.debug(f"DuckDuckGo search results: {ddg_results}")
- crawler = WebCrawler()
+ crawler = AsyncWebCrawler()
crawler.warmup()
results = []
diff --git a/src/praisonai/pyproject.toml b/src/praisonai/pyproject.toml
index 5a1489b7c..b9a4c859e 100644
--- a/src/praisonai/pyproject.toml
+++ b/src/praisonai/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "PraisonAI"
-version = "2.2.20"
+version = "2.2.21"
description = "PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration."
readme = "README.md"
license = ""
@@ -94,7 +94,7 @@ autogen = ["pyautogen>=0.2.19", "praisonai-tools>=0.0.15", "crewai"]
[tool.poetry]
name = "PraisonAI"
-version = "2.2.20"
+version = "2.2.21"
description = "PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration."
authors = ["Mervin Praison"]
license = ""
diff --git a/src/praisonai/uv.lock b/src/praisonai/uv.lock
index 041afa143..cb4cdca88 100644
--- a/src/praisonai/uv.lock
+++ b/src/praisonai/uv.lock
@@ -3931,7 +3931,7 @@ wheels = [
[[package]]
name = "praisonai"
-version = "2.2.20"
+version = "2.2.21"
source = { editable = "." }
dependencies = [
{ name = "instructor" },