Skip to content

Commit 1d4721d

Browse files
feat(ag2): add AG2 framework backend integration (#1156)
feat(ag2): add AG2 framework backend integration
2 parents 29facc8 + 90915fe commit 1d4721d

10 files changed

Lines changed: 1428 additions & 2 deletions

File tree

examples/ag2/ag2_basic.yaml

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
framework: ag2
topic: "Research the latest developments in AI agents"

# Install: pip install "praisonai[ag2]"
# Run: praisonai --framework ag2 examples/ag2/ag2_basic.yaml
# or praisonai run examples/ag2/ag2_basic.yaml --framework ag2

roles:
  research_agent:
    role: "AI Research Specialist"
    goal: "Research and summarise the latest developments in AI agent frameworks"
    backstory: |
      You are an experienced AI researcher with deep knowledge of multi-agent
      systems, large language models, and the latest trends in AI tooling.
      You excel at synthesising complex technical topics into clear summaries.
    tasks:
      research_task:
        description: |
          Research and summarise the latest developments in AI agent frameworks
          for the topic: {topic}

          Focus on:
          1. Key frameworks and their unique capabilities
          2. Recent innovations and improvements
          3. Community adoption and ecosystem growth
          4. Practical use cases and success stories
        expected_output: |
          A concise research summary covering the key developments,
          major frameworks, and practical insights. Include 3-5 bullet
          points of the most important findings.

examples/ag2/ag2_bedrock.yaml

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
framework: ag2
topic: "Cloud-native AI deployment strategies on AWS"

# AG2 exclusive feature: native AWS Bedrock support via LLMConfig(api_type="bedrock")
#
# Prerequisites:
#   pip install "praisonai[ag2]"
#   aws configure (or set AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_DEFAULT_REGION)
#
# Run:
#   praisonai --framework ag2 examples/ag2/ag2_bedrock.yaml
#
# The AG2 adapter detects api_type="bedrock" from the llm config and uses
# LLMConfig(api_type="bedrock", model=...) — no OPENAI_API_KEY required.
# AWS credentials are sourced from boto3 (env vars, ~/.aws/credentials, IAM role).

roles:
  cloud_architect:
    role: "AWS Cloud Architect"
    goal: "Design and explain cloud-native AI deployment strategies on AWS"
    backstory: |
      You are an AWS Solutions Architect specialising in AI/ML workloads.
      You have deep expertise in Amazon Bedrock, SageMaker, ECS, and Lambda,
      and you help organisations deploy AI agents at scale securely and cost-effectively.
    llm:
      model: "bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0"
      api_type: "bedrock"
      aws_region: "us-east-1"
    tasks:
      architecture_task:
        description: |
          Design a cloud-native deployment strategy for AI agents on AWS for: {topic}

          Cover:
          1. Recommended AWS services (Bedrock, ECS, Lambda, etc.)
          2. Scalability and cost optimisation patterns
          3. Security and compliance considerations
          4. A simple reference architecture overview
        expected_output: |
          A concise architecture guide with service recommendations,
          a high-level deployment diagram description, and key
          best practices for production AI agent deployments on AWS.

examples/ag2/ag2_multi_agent.yaml

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
framework: ag2
topic: "The impact of open-source AI on enterprise software development"

# Install: pip install "praisonai[ag2]"
# Run: praisonai --framework ag2 examples/ag2/ag2_multi_agent.yaml
# or praisonai run examples/ag2/ag2_multi_agent.yaml --framework ag2
#
# This example demonstrates AG2's GroupChat multi-agent coordination.
# Both agents participate in a collaborative conversation managed by
# a GroupChatManager until the task is complete.

roles:
  researcher:
    role: "Research Specialist"
    goal: "Gather and analyse information on the given topic"
    backstory: |
      You are a meticulous researcher who excels at finding relevant
      information, analysing trends, and presenting data-backed insights.
      You always cite your reasoning and structure your findings clearly.
    tasks:
      research_task:
        description: |
          Research the topic: {topic}

          Investigate:
          1. Current state and adoption rates
          2. Key players and projects driving the trend
          3. Technical advantages and challenges
          4. Business impact and cost implications
        expected_output: |
          A structured research briefing with findings on the topic,
          including key data points, trends, and technical observations.

  writer:
    role: "Technical Content Writer"
    goal: "Transform research findings into clear, engaging written content"
    backstory: |
      You are a skilled technical writer who turns complex research into
      accessible, well-structured articles. You focus on clarity, logical
      flow, and actionable takeaways for a professional audience.
    tasks:
      writing_task:
        description: |
          Using the research findings provided by the Research Specialist,
          write a concise article on: {topic}

          The article should:
          1. Open with a compelling hook
          2. Present key findings logically
          3. Include practical implications for developers
          4. Close with a forward-looking conclusion
        expected_output: |
          A 400-500 word article suitable for a technical blog,
          with clear sections, professional tone, and concrete takeaways.

src/praisonai/praisonai/agents_generator.py

Lines changed: 162 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,16 @@
5555
except ImportError:
5656
pass
5757

58+
# Probe for the AG2 framework (community AutoGen fork, PyPI package "ag2").
# AG2 installs into the "autogen" namespace, so the distribution metadata is
# checked first to avoid mistaking classic pyautogen for AG2; importing
# LLMConfig (an AG2-exclusive class) confirms the right fork is present.
AG2_AVAILABLE = False
try:
    import importlib.metadata as _md
    _md.distribution('ag2')
    from autogen import LLMConfig as _ag2_probe  # noqa: F401 — AG2-exclusive class
    AG2_AVAILABLE = True
    del _ag2_probe, _md
except Exception:
    # Missing distribution or failed import — leave the flag False.
    pass
67+
5868
try:
5969
import agentops
6070
AGENTOPS_AVAILABLE = True
@@ -65,7 +75,7 @@
6575
pass
6676

6777
# Only try to import praisonai_tools if either CrewAI or AutoGen is available
68-
if CREWAI_AVAILABLE or AUTOGEN_AVAILABLE or PRAISONAI_AVAILABLE:
78+
if CREWAI_AVAILABLE or AUTOGEN_AVAILABLE or PRAISONAI_AVAILABLE or AG2_AVAILABLE:
6979
try:
7080
from praisonai_tools import (
7181
CodeDocsSearchTool, CSVSearchTool, DirectorySearchTool, DOCXSearchTool, DirectoryReadTool,
@@ -216,6 +226,8 @@ def __init__(self, agent_file, framework, config_list, log_level=None, agent_cal
216226
raise ImportError("AutoGen is not installed. Please install it with 'pip install praisonai[autogen]' for v0.2 or 'pip install praisonai[autogen-v4]' for v0.4")
217227
elif framework == "praisonai" and not PRAISONAI_AVAILABLE:
218228
raise ImportError("PraisonAI is not installed. Please install it with 'pip install praisonaiagents'")
229+
elif framework == "ag2" and not AG2_AVAILABLE:
230+
raise ImportError("AG2 is not installed. Please install it with 'pip install praisonai[ag2]'")
219231

220232
def is_function_or_decorated(self, obj):
221233
"""
@@ -391,7 +403,7 @@ def generate_crew_and_kickoff(self):
391403
tools_dict = {}
392404

393405
# Only try to use praisonai_tools if it's available and needed
394-
if PRAISONAI_TOOLS_AVAILABLE and (CREWAI_AVAILABLE or AUTOGEN_AVAILABLE or PRAISONAI_AVAILABLE):
406+
if PRAISONAI_TOOLS_AVAILABLE and (CREWAI_AVAILABLE or AUTOGEN_AVAILABLE or PRAISONAI_AVAILABLE or AG2_AVAILABLE):
395407
tools_dict = {
396408
'CodeDocsSearchTool': CodeDocsSearchTool(),
397409
'CSVSearchTool': CSVSearchTool(),
@@ -462,6 +474,12 @@ def generate_crew_and_kickoff(self):
462474
else:
463475
self.logger.info("Using AutoGen v0.2")
464476
return self._run_autogen(config, topic, tools_dict)
477+
elif framework == "ag2":
478+
if not AG2_AVAILABLE:
479+
raise ImportError("AG2 is not installed. Please install it with 'pip install praisonai[ag2]'")
480+
if AGENTOPS_AVAILABLE:
481+
agentops.init(os.environ.get("AGENTOPS_API_KEY"), default_tags=["ag2"])
482+
return self._run_ag2(config, topic, tools_dict)
465483
elif framework == "praisonai":
466484
if not PRAISONAI_AVAILABLE:
467485
raise ImportError("PraisonAI is not installed. Please install it with 'pip install praisonaiagents'")
@@ -711,6 +729,148 @@ async def run_autogen_v4_async():
711729
self.logger.error(f"Error running AutoGen v0.4: {str(e)}")
712730
return f"### AutoGen v0.4 Error ###\n{str(e)}"
713731

732+
def _run_ag2(self, config, topic, tools_dict):
    """
    Run agents using the AG2 framework (community fork of AutoGen, PyPI: ag2).

    AG2 installs under the 'autogen' namespace — there is no 'import ag2'.
    Builds one AssistantAgent per YAML role, wires them into a GroupChat
    driven by a GroupChatManager, and kicks the chat off from a non-human
    UserProxyAgent.

    Args:
        config (dict): Configuration dictionary parsed from YAML
        topic (str): The topic/task to process
        tools_dict (dict): Dictionary of available tools

    Returns:
        str: Result prefixed with '### AG2 Output ###'
    """
    import re as _re
    from autogen import (
        AssistantAgent, UserProxyAgent, GroupChat, GroupChatManager, LLMConfig
    )

    model_config = self.config_list[0] if self.config_list else {}

    # Allow YAML top-level llm block to override config_list values
    yaml_llm = config.get("llm", {}) or {}
    # Also check first role's llm block as a fallback
    first_role_llm = {}
    for role_details in config.get("roles", {}).values():
        first_role_llm = role_details.get("llm", {}) or {}
        break

    # Priority: YAML top-level llm > first role llm > config_list > env vars
    def _resolve(key, env_var=None, default=None):
        return (yaml_llm.get(key) or first_role_llm.get(key)
                or model_config.get(key)
                or (os.environ.get(env_var) if env_var else None)
                or default)

    api_type = _resolve("api_type", default="openai").lower()
    model_name = _resolve("model", default="gpt-4o-mini")
    api_key = _resolve("api_key", env_var="OPENAI_API_KEY")
    # FIX: base_url previously consulted config_list before the YAML llm block
    # (and ignored the first role's llm block entirely), contradicting the
    # documented priority above. Resolve it in the same order as other keys.
    base_url = (yaml_llm.get("base_url")
                or first_role_llm.get("base_url")
                or model_config.get("base_url")
                or os.environ.get("OPENAI_BASE_URL")
                or os.environ.get("OPENAI_API_BASE"))

    # Build LLMConfig — Bedrock needs no api_key (boto3 supplies credentials)
    if api_type == "bedrock":
        llm_config_entry = {"api_type": "bedrock", "model": model_name}
    else:
        llm_config_entry = {"model": model_name}
        if api_key:
            llm_config_entry["api_key"] = api_key
        # Only pass a base_url when it is genuinely non-default.
        if base_url and base_url not in ("https://api.openai.com/v1", "https://api.openai.com/v1/"):
            llm_config_entry["base_url"] = base_url
    llm_config = LLMConfig(llm_config_entry)

    # Non-interactive proxy that ends the chat when an agent says TERMINATE.
    user_proxy = UserProxyAgent(
        name="User",
        human_input_mode="NEVER",
        is_termination_msg=lambda x: "TERMINATE" in (x.get("content") or ""),
        code_execution_config=False,
    )

    # Create one AssistantAgent per role
    ag2_agent_entries = []
    for role, details in config["roles"].items():
        agent_name = details.get("role", role).replace("{topic}", topic)
        backstory = details.get("backstory", "").replace("{topic}", topic)
        # AG2 agent names must be identifier-like; sanitize the display role.
        agent_name_safe = _re.sub(r"[^a-zA-Z0-9_\-]", "_", agent_name)
        assistant = AssistantAgent(
            name=agent_name_safe,
            system_message=backstory + "\nWhen the task is done, reply 'TERMINATE'.",
            llm_config=llm_config,
        )
        ag2_agent_entries.append((role, details, assistant))

    # Register tools via AG2 decorator pattern: the assistant proposes the
    # call (register_for_llm), the user proxy executes it.
    for role, details, assistant in ag2_agent_entries:
        for tool_name in details.get("tools", []):
            tool = tools_dict.get(tool_name)
            if tool is None:
                continue
            func = tool if callable(tool) else getattr(tool, "run", None)
            if func is None:
                continue

            # FIX: bind the tool name explicitly instead of relying on the
            # late-bound loop variable inside the factory.
            def make_tool_fn(f, fname):
                def tool_fn(**kwargs):
                    return f(**kwargs) if callable(f) else str(f)
                tool_fn.__name__ = fname
                return tool_fn

            wrapped = make_tool_fn(func, tool_name)
            assistant.register_for_llm(description=f"Tool: {tool_name}")(wrapped)
            user_proxy.register_for_execution()(wrapped)

    all_assistants = [a for _, _, a in ag2_agent_entries]
    if not all_assistants:
        return "### AG2 Output ###\nNo agents created from configuration."

    # Build initial message from all task descriptions
    task_lines = []
    for role, details, _ in ag2_agent_entries:
        for task_name, task_details in details.get("tasks", {}).items():
            desc = task_details.get("description", "").replace("{topic}", topic)
            if desc:
                task_lines.append(desc)
    initial_message = "\n".join(task_lines) if task_lines else topic

    groupchat = GroupChat(
        agents=[user_proxy] + all_assistants,
        messages=[],
        max_round=12,
    )
    manager = GroupChatManager(groupchat=groupchat, llm_config=llm_config)

    try:
        chat_result = user_proxy.initiate_chat(manager, message=initial_message)
    except Exception as e:
        return f"### AG2 Error ###\n{str(e)}"

    # Strip the trailing 'TERMINATE' marker (and surrounding punctuation)
    # that agents append per the system prompt. Shared by both result paths.
    def _clean(text):
        return _re.sub(r'[\s\.\,]*TERMINATE[\s\.\,]*$', '', text, flags=_re.IGNORECASE).strip().rstrip('.')

    # Prefer ChatResult.summary if available, otherwise scan messages
    result_content = ""
    summary = getattr(chat_result, "summary", None)
    if summary and isinstance(summary, str) and summary.strip():
        result_content = _clean(summary)

    if not result_content:
        # Walk backwards to the most recent non-empty agent message,
        # skipping the proxy's own turns.
        for msg in reversed(groupchat.messages):
            if msg.get("name") == "User":
                continue
            content = (msg.get("content") or "").strip()
            if content:
                result_content = _clean(content)
                if result_content:
                    break

    if not result_content:
        result_content = "Task completed."

    return f"### AG2 Output ###\n{result_content}"
873+
714874
def _run_crewai(self, config, topic, tools_dict):
715875
"""
716876
Run agents using the CrewAI framework.

src/praisonai/praisonai/auto.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,23 @@ def _check_autogen_v4_available() -> bool:
9191
return _autogen_v4_available
9292

9393

94+
# --- AG2 lazy loading ---
95+
_ag2_available = None
96+
97+
def _check_ag2_available() -> bool:
98+
"""Check if AG2 (community fork of AutoGen) is available (cached)."""
99+
global _ag2_available
100+
if _ag2_available is None:
101+
try:
102+
import importlib.metadata
103+
importlib.metadata.distribution('ag2')
104+
from autogen import LLMConfig # noqa: F401 — AG2-exclusive class
105+
_ag2_available = True
106+
except Exception:
107+
_ag2_available = False
108+
return _ag2_available
109+
110+
94111
def _get_autogen():
95112
"""Lazy load autogen module."""
96113
global _autogen_module
@@ -658,6 +675,11 @@ def __init__(self, topic="Movie Story writing about AI", agent_file="test.yaml",
658675
Praisonai is not installed. Please install with:
659676
pip install praisonaiagents
660677
""")
678+
elif framework == "ag2" and not _check_ag2_available():
679+
raise ImportError("""
680+
AG2 is not installed. Please install with:
681+
pip install "praisonai[ag2]"
682+
""")
661683

662684
# Only show tools message if using a framework and tools are needed
663685
if (framework in ["crewai", "autogen"]) and not _check_praisonai_tools_available():

src/praisonai/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,7 @@ call = [
9191
train = []
9292
crewai = ["crewai>=0.157.0", "praisonai-tools>=0.1.0"]
9393
autogen = ["pyautogen==0.2.29", "praisonai-tools>=0.1.0", "crewai"]
94+
ag2 = ["ag2>=0.11.0", "praisonai-tools>=0.1.0"]
9495
autogen-v4 = [
9596
"autogen-agentchat>=0.4.0",
9697
"autogen-ext[openai]>=0.4.0",

src/praisonai/tests/integration/ag2/__init__.py

Whitespace-only changes.

0 commit comments

Comments
 (0)