Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 34 additions & 4 deletions backend/app/agent/agent_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@

from app.agent.listen_chat_agent import ListenChatAgent, logger
from app.model.chat import AgentModelConfig, Chat
from app.model.model_platform import patch_bedrock_cloud_config
from app.service.task import ActionCreateAgentData, Agents, get_task_lock
from app.utils.event_loop_utils import _schedule_async_task

Expand Down Expand Up @@ -80,7 +81,14 @@ def agent_model(
for attr in config_attrs:
effective_config[attr] = getattr(options, attr)
extra_params = options.extra_params or {}

# Cloud mode: inject default Bedrock region and adjust URL for proxy.
if (
effective_config.get("model_platform") == "aws-bedrock-converse"
and options.is_cloud()
):
effective_config["api_url"], extra_params = patch_bedrock_cloud_config(
effective_config["api_url"], extra_params
)
init_param_keys = {
"api_version",
"azure_ad_token",
Expand All @@ -90,6 +98,10 @@ def agent_model(
"client",
"async_client",
"azure_deployment_name",
"region_name",
"aws_access_key_id",
"aws_secret_access_key",
"aws_session_token",
}

init_params = {}
Expand All @@ -113,6 +125,26 @@ def agent_model(
else:
model_config[k] = v

# Auto-inject prompt caching based on model platform
try:
model_platform_enum = ModelPlatformType(
effective_config["model_platform"].lower()
)
if model_platform_enum in {
ModelPlatformType.ANTHROPIC,
ModelPlatformType.AWS_BEDROCK_CONVERSE,
}:
model_config.setdefault("cache_control", "5m")
elif model_platform_enum == ModelPlatformType.OPENAI:
Comment thread
Zephyroam marked this conversation as resolved.
model_config.setdefault(
"prompt_cache_key", str(options.project_id)
)
except (ValueError, AttributeError):
logging.error(
f"Invalid model platform: {effective_config['model_platform']}",
exc_info=True,
)

if agent_name == Agents.task_agent:
model_config["stream"] = True
if agent_name == Agents.browser_agent:
Expand All @@ -137,10 +169,8 @@ def agent_model(
model_platform_enum = None

if effective_config["model_platform"].lower() == "anthropic":
if model_config.get("cache_control") is None:
model_config["cache_control"] = "5m"
if model_config.get("max_tokens") is None:
model_config["max_tokens"] = 64000
model_config["max_tokens"] = 128000

model = ModelFactory.create(
model_platform=effective_config["model_platform"],
Expand Down
51 changes: 38 additions & 13 deletions backend/app/agent/factory/mcp.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,18 @@
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
import asyncio
import logging
import uuid

from camel.models import ModelFactory
from camel.types import ModelPlatformType

from app.agent.listen_chat_agent import ListenChatAgent, logger
from app.agent.prompt import MCP_SYS_PROMPT
from app.agent.toolkit.mcp_search_toolkit import McpSearchToolkit
from app.agent.tools import get_mcp_tools
from app.model.chat import Chat
from app.model.model_platform import patch_bedrock_cloud_config
from app.service.task import ActionCreateAgentData, Agents, get_task_lock


Expand Down Expand Up @@ -73,6 +76,38 @@ async def mcp_agent(options: Chat):
)
)
)
extra_params = {
k: v
for k, v in (options.extra_params or {}).items()
if k not in ["model_platform", "model_type", "api_key", "url"]
}
api_url = options.api_url
if options.model_platform == "aws-bedrock-converse" and options.is_cloud():
api_url, extra_params = patch_bedrock_cloud_config(
api_url, extra_params
)

# Build model_config_dict with prompt caching
model_config_dict = {}
if options.is_cloud():
model_config_dict["user"] = str(options.project_id)
try:
platform_enum = ModelPlatformType(options.model_platform.lower())
if platform_enum in {
ModelPlatformType.ANTHROPIC,
ModelPlatformType.AWS_BEDROCK_CONVERSE,
}:
model_config_dict.setdefault("cache_control", "5m")
elif platform_enum == ModelPlatformType.OPENAI:
model_config_dict.setdefault(
"prompt_cache_key", str(options.project_id)
)
except (ValueError, AttributeError):
logging.error(
f"Invalid model platform: {options.model_platform}",
exc_info=True,
)

Comment thread
Zephyroam marked this conversation as resolved.
return ListenChatAgent(
options.project_id,
Agents.mcp_agent,
Expand All @@ -81,20 +116,10 @@ async def mcp_agent(options: Chat):
model_platform=options.model_platform,
model_type=options.model_type,
api_key=options.api_key,
url=options.api_url,
model_config_dict=(
{
"user": str(options.project_id),
}
if options.is_cloud()
else None
),
url=api_url,
model_config_dict=model_config_dict or None,
timeout=600, # 10 minutes
**{
k: v
for k, v in (options.extra_params or {}).items()
if k not in ["model_platform", "model_type", "api_key", "url"]
},
**extra_params,
),
# output_language=options.language,
tools=tools,
Expand Down
6 changes: 6 additions & 0 deletions backend/app/component/model_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
from camel.agents import ChatAgent
from camel.models import ModelFactory, ModelProcessingError

from app.model.model_platform import BEDROCK_CONVERSE_REGION

logger = logging.getLogger("model_validation")

# Expected result from tool execution for validation
Expand Down Expand Up @@ -231,6 +233,8 @@ def create_agent(
model_config_dict = dict(model_config_dict or {})
if model_config_dict.get("max_tokens") is None:
model_config_dict["max_tokens"] = 4096
if str(platform).lower() == "aws-bedrock-converse":
kwargs.setdefault("region_name", BEDROCK_CONVERSE_REGION)
model = ModelFactory.create(
model_platform=platform,
model_type=mtype,
Expand Down Expand Up @@ -334,6 +338,8 @@ def validate_model_with_details(
model_config_dict = dict(model_config_dict or {})
if model_config_dict.get("max_tokens") is None:
model_config_dict["max_tokens"] = 4096
if str(model_platform).lower() == "aws-bedrock-converse":
kwargs.setdefault("region_name", BEDROCK_CONVERSE_REGION)
model = ModelFactory.create(
model_platform=model_platform,
model_type=model_type,
Expand Down
2 changes: 1 addition & 1 deletion backend/app/model/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def get_uvx_env(self) -> dict[str, str]:
)

def is_cloud(self):
    """Return True when the configured API URL targets the Eigent cloud proxy."""
    url = self.api_url
    if url is None:
        return False
    return "eigent-proxy" in url

def file_save_path(self, path: str | None = None):
email = re.sub(r'[\\/*?:"<>|\s]', "_", self.email.split("@")[0]).strip(
Expand Down
18 changes: 18 additions & 0 deletions backend/app/model/model_platform.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,24 @@
"llama.cpp": "openai-compatible-model",
}

# Bedrock Converse requires a region during model initialization.
BEDROCK_CONVERSE_REGION: Final[str] = "us-west-2"
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Users who need a different Bedrock region (e.g., us-east-1, eu-west-1) won't be able to use this at all. It would be great to make this user-configurable, or at least read it from an environment variable.

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Now eigent cloud and BYOK are separated.



def patch_bedrock_cloud_config(
    api_url: str, extra_params: dict
) -> tuple[str, dict]:
    """Patch API URL and extra_params for Bedrock Converse in cloud mode.

    The cloud proxy routes Bedrock traffic under a ``/bedrock`` path, and
    the Bedrock Converse client requires a region at initialization time.

    Args:
        api_url: Base proxy URL configured for the session.
        extra_params: Extra model keyword arguments; never mutated in place.

    Returns:
        The updated ``(api_url, extra_params)`` pair: ``region_name``
        defaults to ``BEDROCK_CONVERSE_REGION`` unless already set, and
        ``/bedrock`` is appended to the URL exactly once.
    """
    # Copy so the caller's dict is not mutated as a side effect.
    extra_params = dict(extra_params)
    extra_params.setdefault("region_name", BEDROCK_CONVERSE_REGION)
    # Append to the slash-normalized URL so a value like "http://proxy/"
    # becomes "http://proxy/bedrock" rather than "http://proxy//bedrock".
    normalized = api_url.rstrip("/")
    if not normalized.endswith("/bedrock"):
        api_url = normalized + "/bedrock"
    return api_url, extra_params


def normalize_model_platform(platform: str) -> str:
"""Normalize provider aliases to supported model platform names."""
Expand Down
23 changes: 21 additions & 2 deletions backend/app/utils/single_agent_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,15 @@
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

import asyncio
import datetime
import logging
from collections.abc import Awaitable, Callable

from camel.agents.chat_agent import AsyncStreamingChatAgentResponse
from camel.agents.chat_agent import (
AsyncStreamingChatAgentResponse,
ChatAgentResponse,
)
from camel.societies.workforce.prompts import PROCESS_TASK_PROMPT
from camel.societies.workforce.single_agent_worker import (
SingleAgentWorker as BaseSingleAgentWorker,
Expand Down Expand Up @@ -67,7 +72,13 @@ def __init__(
self.worker = worker # change type hint

async def _process_task(
self, task: Task, dependencies: list[Task], stream_callback=None
self,
task: Task,
dependencies: list[Task],
stream_callback: Callable[
["ChatAgentResponse"], Awaitable[None] | None
]
| None = None,
Comment on lines +75 to +81
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The new stream_callback looks like an independent feature enhancement. It might be better to split this out into a separate PR to keep things focused and easier to review.

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is because of camel-ai/camel#3744. All we need to do is just add the corresponding arguments. So I guess it should be fine in this PR?

) -> TaskState:
r"""Processes a task with its dependencies using an efficient agent
management system.
Expand Down Expand Up @@ -146,6 +157,10 @@ async def _process_task(
async for chunk in response:
chunk_count += 1
last_chunk = chunk
if stream_callback:
maybe = stream_callback(chunk)
if asyncio.iscoroutine(maybe):
await maybe
if chunk.msg and chunk.msg.content:
accumulated_content += chunk.msg.content
logger.info(
Expand Down Expand Up @@ -186,6 +201,10 @@ async def _process_task(
last_chunk = None
async for chunk in response:
last_chunk = chunk
if stream_callback:
maybe = stream_callback(chunk)
if asyncio.iscoroutine(maybe):
await maybe
if chunk.msg:
if chunk.msg.content:
accumulated_content += chunk.msg.content
Expand Down
2 changes: 1 addition & 1 deletion backend/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ readme = "README.md"
requires-python = ">=3.11,<3.12"
dependencies = [
"pip>=23.0",
"camel-ai[eigent]==0.2.90a6",
"camel-ai[eigent]==0.2.90",
"fastapi>=0.115.12",
"fastapi-babel>=1.0.0",
"uvicorn[standard]>=0.34.2",
Expand Down
19 changes: 19 additions & 0 deletions backend/tests/app/component/test_model_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,25 @@ def test_create_agent_invalid_model_platform():
create_agent(model_platform=None, model_type="GPT_4O_MINI")


@pytest.mark.unit
# Decorators apply bottom-up, so the bottom @patch (ChatAgent) supplies the
# first mock argument and ModelFactory.create supplies the second.
@patch("app.component.model_validation.ModelFactory.create")
@patch("app.component.model_validation.ChatAgent")
def test_create_agent_hardcodes_bedrock_converse_region(
    mock_chat_agent, mock_model_factory
):
    """Test Bedrock Converse validation always uses the hardcoded region."""
    # Stub model construction and the agent wrapper so no credentials or
    # network access are needed; only the factory call kwargs matter here.
    mock_model_factory.return_value = MagicMock()
    mock_chat_agent.return_value = MagicMock()

    create_agent(
        model_platform="aws-bedrock-converse",
        model_type="anthropic.claude-3-5-sonnet",
        api_key="test_key",
    )

    # create_agent should default region_name to BEDROCK_CONVERSE_REGION
    # ("us-west-2") for the aws-bedrock-converse platform.
    assert mock_model_factory.call_args.kwargs["region_name"] == "us-west-2"


@pytest.mark.unit
def test_validation_missing_model_type():
"""Test validation with missing model type."""
Expand Down
10 changes: 5 additions & 5 deletions backend/uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading