Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
# Install Python packages (using latest versions)
RUN pip install --no-cache-dir \
flask \
"praisonai>=2.2.20" \
"praisonai>=2.2.21" \
"praisonai[api]" \
gunicorn \
markdown
Expand Down
2 changes: 1 addition & 1 deletion docker/Dockerfile.chat
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
# Install Python packages (using latest versions)
RUN pip install --no-cache-dir \
praisonai_tools \
"praisonai>=2.2.20" \
"praisonai>=2.2.21" \
"praisonai[chat]" \
"embedchain[github,youtube]"

Expand Down
2 changes: 1 addition & 1 deletion docker/Dockerfile.dev
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ RUN mkdir -p /root/.praison
# Install Python packages (using latest versions)
RUN pip install --no-cache-dir \
praisonai_tools \
"praisonai>=2.2.20" \
"praisonai>=2.2.21" \
"praisonai[ui]" \
"praisonai[chat]" \
"praisonai[realtime]" \
Expand Down
2 changes: 1 addition & 1 deletion docker/Dockerfile.ui
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
# Install Python packages (using latest versions)
RUN pip install --no-cache-dir \
praisonai_tools \
"praisonai>=2.2.20" \
"praisonai>=2.2.21" \
"praisonai[ui]" \
"praisonai[crewai]"

Expand Down
4 changes: 2 additions & 2 deletions docker/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ healthcheck:
## 📦 Package Versions

All Docker images use consistent, up-to-date versions:
- PraisonAI: `>=2.2.20`
- PraisonAI: `>=2.2.21`
- PraisonAI Agents: `>=0.0.92`
- Python: `3.11-slim`

Expand Down Expand Up @@ -218,7 +218,7 @@ docker-compose up -d
### Version Pinning
To use specific versions, update the Dockerfile:
```dockerfile
RUN pip install "praisonai==2.2.20" "praisonaiagents==0.0.92"
RUN pip install "praisonai==2.2.21" "praisonaiagents==0.0.92"
```

## 🌐 Production Deployment
Expand Down
2 changes: 1 addition & 1 deletion docs/api/praisonai/deploy.html
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ <h2 id="raises">Raises</h2>
file.write(&#34;FROM python:3.11-slim\n&#34;)
file.write(&#34;WORKDIR /app\n&#34;)
file.write(&#34;COPY . .\n&#34;)
file.write(&#34;RUN pip install flask praisonai==2.2.20 gunicorn markdown\n&#34;)
file.write(&#34;RUN pip install flask praisonai==2.2.21 gunicorn markdown\n&#34;)
file.write(&#34;EXPOSE 8080\n&#34;)
file.write(&#39;CMD [&#34;gunicorn&#34;, &#34;-b&#34;, &#34;0.0.0.0:8080&#34;, &#34;api:app&#34;]\n&#39;)

Expand Down
2 changes: 1 addition & 1 deletion docs/developers/local-development.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ WORKDIR /app

COPY . .

RUN pip install flask praisonai==2.2.20 watchdog
RUN pip install flask praisonai==2.2.21 watchdog

EXPOSE 5555

Expand Down
4 changes: 2 additions & 2 deletions docs/tools/crawl4ai.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ export OPENAI_API_KEY=xxxxxxxxx

```python
import os
from crawl4ai import WebCrawler
from crawl4ai import AsyncWebCrawler
from crawl4ai.extraction_strategy import LLMExtractionStrategy
from pydantic import BaseModel, Field

Expand All @@ -41,7 +41,7 @@ class OpenAIModelFee(BaseModel):
output_fee: str = Field(..., description="Fee for output token for the OpenAI model.")

url = 'https://openai.com/api/pricing/'
crawler = WebCrawler()
crawler = AsyncWebCrawler()
crawler.warmup()

result = crawler.run(
Expand Down
2 changes: 1 addition & 1 deletion docs/tools/spider_tools.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ from praisonaiagents.tools import scrape_page, extract_links, crawl, extract_tex

# Create search agent
agent = Agent(
name="WebCrawler",
name="AsyncWebCrawler",
role="Web Scraping Specialist",
goal="Extract and analyze web content efficiently.",
backstory="Expert in web scraping and content extraction.",
Expand Down
2 changes: 1 addition & 1 deletion docs/ui/chat.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ To facilitate local development with live reload, you can use Docker. Follow the

COPY . .

RUN pip install flask praisonai==2.2.20 watchdog
RUN pip install flask praisonai==2.2.21 watchdog

EXPOSE 5555

Expand Down
2 changes: 1 addition & 1 deletion docs/ui/code.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ To facilitate local development with live reload, you can use Docker. Follow the

COPY . .

RUN pip install flask praisonai==2.2.20 watchdog
RUN pip install flask praisonai==2.2.21 watchdog

EXPOSE 5555

Expand Down
16 changes: 8 additions & 8 deletions examples/cookbooks/yaml/model_fee_retreival_agents.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@
},
"outputs": [],
"source": [
"from crawl4ai import WebCrawler\n",
"from crawl4ai import AsyncWebCrawler\n",
"from crawl4ai.extraction_strategy import LLMExtractionStrategy\n",
"from pydantic import BaseModel, Field\n",
"from praisonai_tools import BaseTool\n",
Expand All @@ -66,7 +66,7 @@
" description: str = \"Extracts model fees for input and output tokens from the given pricing page.\"\n",
"\n",
" def _run(self, url: str):\n",
" crawler = WebCrawler()\n",
" crawler = AsyncWebCrawler()\n",
" crawler.warmup()\n",
"\n",
" result = crawler.run(\n",
Expand Down Expand Up @@ -196,8 +196,8 @@
"output_type": "stream",
"text": [
"[LOG] 🚀 Initializing LocalSeleniumCrawlerStrategy\n",
"[LOG] 🌤️ Warming up the WebCrawler\n",
"[LOG] 🌞 WebCrawler is ready to crawl\n",
"[LOG] 🌤️ Warming up the AsyncWebCrawler\n",
"[LOG] 🌞 AsyncWebCrawler is ready to crawl\n",
"[LOG] 🚀 Crawling done for https://openai.com/api/pricing/, success: True, time taken: 0.86 seconds\n",
"[LOG] 🚀 Content extracted for https://openai.com/api/pricing/, success: True, time taken: 0.01 seconds\n",
"[LOG] 🔥 Extracting semantic blocks for https://openai.com/api/pricing/, Strategy: LLMExtractionStrategy\n",
Expand Down Expand Up @@ -245,8 +245,8 @@
"output_type": "stream",
"text": [
"[LOG] 🚀 Initializing LocalSeleniumCrawlerStrategy\n",
"[LOG] 🌤️ Warming up the WebCrawler\n",
"[LOG] 🌞 WebCrawler is ready to crawl\n",
"[LOG] 🌤️ Warming up the AsyncWebCrawler\n",
"[LOG] 🌞 AsyncWebCrawler is ready to crawl\n",
"[LOG] 🚀 Crawling done for https://www.anthropic.com/pricing, success: True, time taken: 2.93 seconds\n",
"[LOG] 🚀 Content extracted for https://www.anthropic.com/pricing, success: True, time taken: 0.06 seconds\n",
"[LOG] 🔥 Extracting semantic blocks for https://www.anthropic.com/pricing, Strategy: LLMExtractionStrategy\n",
Expand Down Expand Up @@ -323,8 +323,8 @@
"output_type": "stream",
"text": [
"[LOG] 🚀 Initializing LocalSeleniumCrawlerStrategy\n",
"[LOG] 🌤️ Warming up the WebCrawler\n",
"[LOG] 🌞 WebCrawler is ready to crawl\n",
"[LOG] 🌤️ Warming up the AsyncWebCrawler\n",
"[LOG] 🌞 AsyncWebCrawler is ready to crawl\n",
"[LOG] 🚀 Crawling done for https://cohere.com/pricing, success: True, time taken: 6.34 seconds\n",
"[LOG] 🚀 Content extracted for https://cohere.com/pricing, success: True, time taken: 0.14 seconds\n",
"[LOG] 🔥 Extracting semantic blocks for https://cohere.com/pricing, Strategy: LLMExtractionStrategy\n",
Expand Down
4 changes: 2 additions & 2 deletions src/praisonai/praisonai.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ class Praisonai < Formula

desc "AI tools for various AI applications"
homepage "https://github.com/MervinPraison/PraisonAI"
url "https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.2.20.tar.gz"
sha256 `curl -sL https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.2.20.tar.gz | shasum -a 256`.split.first
url "https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.2.21.tar.gz"
sha256 `curl -sL https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.2.21.tar.gz | shasum -a 256`.split.first
license "MIT"

depends_on "python@3.11"
Expand Down
2 changes: 1 addition & 1 deletion src/praisonai/praisonai/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def create_dockerfile(self):
file.write("FROM python:3.11-slim\n")
file.write("WORKDIR /app\n")
file.write("COPY . .\n")
file.write("RUN pip install flask praisonai==2.2.20 gunicorn markdown\n")
file.write("RUN pip install flask praisonai==2.2.21 gunicorn markdown\n")
file.write("EXPOSE 8080\n")
file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')

Expand Down
4 changes: 2 additions & 2 deletions src/praisonai/praisonai/ui/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from dotenv import load_dotenv
from PIL import Image
from tavily import TavilyClient
from crawl4ai import AsyncWebCrawler
from crawl4ai import AsyncAsyncWebCrawler
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

critical

It seems there might be a typo in the imported class name. AsyncAsyncWebCrawler is likely intended to be AsyncWebCrawler. Could you please verify the correct class name from the crawl4ai library? If this is a typo, it will cause an ImportError at runtime.

Suggested change
from crawl4ai import AsyncAsyncWebCrawler
from crawl4ai import AsyncWebCrawler


# Local application/library imports
import chainlit as cl
Expand Down Expand Up @@ -72,7 +72,7 @@ async def tavily_web_search(query):
response = tavily_client.search(query)
logger.debug(f"Tavily search response: {response}")

async with AsyncWebCrawler() as crawler:
async with AsyncAsyncWebCrawler() as crawler:
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

critical

Similar to the import, this instantiation of AsyncAsyncWebCrawler appears to be a typo. If the class is indeed AsyncWebCrawler, this line will need to be corrected to avoid a NameError at runtime.

Suggested change
async with AsyncAsyncWebCrawler() as crawler:
async with AsyncWebCrawler() as crawler:

results = []
for result in response.get('results', []):
url = result.get('url')
Expand Down
6 changes: 3 additions & 3 deletions src/praisonai/praisonai/ui/code.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from PIL import Image
from context import ContextGatherer
from tavily import TavilyClient
from crawl4ai import AsyncWebCrawler
from crawl4ai import AsyncAsyncWebCrawler
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

critical

There appears to be a typo in the imported class name here as well. AsyncAsyncWebCrawler should likely be AsyncWebCrawler. Please check the crawl4ai library for the correct class name to prevent ImportError.

Suggested change
from crawl4ai import AsyncAsyncWebCrawler
from crawl4ai import AsyncWebCrawler


# Local application/library imports
import chainlit as cl
Expand Down Expand Up @@ -153,8 +153,8 @@ async def tavily_web_search(query):
response = tavily_client.search(query)
logger.debug(f"Tavily search response: {response}")

# Create an instance of AsyncWebCrawler
async with AsyncWebCrawler() as crawler:
# Create an instance of AsyncAsyncWebCrawler
async with AsyncAsyncWebCrawler() as crawler:
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

critical

This instantiation of AsyncAsyncWebCrawler seems to be a typo, consistent with the import. If the correct class is AsyncWebCrawler, this should be updated to prevent a NameError.

Suggested change
async with AsyncAsyncWebCrawler() as crawler:
async with AsyncWebCrawler() as crawler:

# Prepare the results
results = []
for result in response.get('results', []):
Expand Down
4 changes: 2 additions & 2 deletions src/praisonai/praisonai/ui/components/aicoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import json
import dotenv
from tavily import TavilyClient
from crawl4ai import AsyncWebCrawler
from crawl4ai import AsyncAsyncWebCrawler
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

critical

It looks like there's a typo in the imported class name. AsyncAsyncWebCrawler is probably meant to be AsyncWebCrawler. Could you confirm the correct class name from crawl4ai to avoid an ImportError?

Suggested change
from crawl4ai import AsyncAsyncWebCrawler
from crawl4ai import AsyncWebCrawler


dotenv.load_dotenv()

Expand Down Expand Up @@ -144,7 +144,7 @@ async def tavily_web_search(self, query):
})
response = self.tavily_client.search(query)
results = []
async with AsyncWebCrawler() as crawler:
async with AsyncAsyncWebCrawler() as crawler:
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

critical

This instantiation of AsyncAsyncWebCrawler also appears to be a typo. If AsyncWebCrawler is the correct class, this line needs to be changed to prevent a NameError at runtime.

Suggested change
async with AsyncAsyncWebCrawler() as crawler:
async with AsyncWebCrawler() as crawler:

for result in response.get('results', []):
url = result.get('url')
if url:
Expand Down
6 changes: 3 additions & 3 deletions src/praisonai/praisonai/ui/realtimeclient/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import plotly
import json
from tavily import TavilyClient
from crawl4ai import WebCrawler
from crawl4ai import AsyncWebCrawler
import os
import logging
import asyncio
Expand Down Expand Up @@ -122,7 +122,7 @@ async def tavily_web_search_handler(query):
})

def process_tavily_results(response):
crawler = WebCrawler()
crawler = AsyncWebCrawler()
crawler.warmup()
results = []
for result in response.get('results', []):
Expand Down Expand Up @@ -151,7 +151,7 @@ async def fallback_to_duckduckgo(query):

logger.debug(f"DuckDuckGo search results: {ddg_results}")

crawler = WebCrawler()
crawler = AsyncWebCrawler()
crawler.warmup()
results = []

Expand Down
4 changes: 2 additions & 2 deletions src/praisonai/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "PraisonAI"
version = "2.2.20"
version = "2.2.21"
description = "PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration."
readme = "README.md"
license = ""
Expand Down Expand Up @@ -94,7 +94,7 @@ autogen = ["pyautogen>=0.2.19", "praisonai-tools>=0.0.15", "crewai"]

[tool.poetry]
name = "PraisonAI"
version = "2.2.20"
version = "2.2.21"
description = "PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration."
authors = ["Mervin Praison"]
license = ""
Expand Down
2 changes: 1 addition & 1 deletion src/praisonai/uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading