Skip to content

Commit 74c4cec

Browse files
fix: resolve P1 backward compatibility and routing issues
- Fix ManagedAgent compute provider routing: maintain backward compatibility for e2b/modal/etc. by routing to LocalManagedAgent with a deprecation warning instead of raising ValueError (fixes hard breaking change)
- Fix LocalAgent provider preservation: preserve the provider value for LLM routing to maintain _resolve_model() prefix behavior (ollama/, gemini/)
- Fix spurious deprecation warnings: auto-detected provider=local no longer triggers warnings for users who never passed provider=
- Remove unused os import from hosted_agent.py
- Improve error messages with provider-specific guidance
- Fix test fragility with exact warning counts and identity checks

Addresses all P1 issues identified by Greptile and CodeRabbit reviewers.

Co-authored-by: Mervin Praison <MervinPraison@users.noreply.github.com>
1 parent 4fa14fa commit 74c4cec

4 files changed

Lines changed: 57 additions & 25 deletions

File tree

src/praisonai/praisonai/integrations/hosted_agent.py

Lines changed: 22 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -28,7 +28,6 @@
2828
- Tools are co-located with the provider infrastructure
2929
"""
3030

31-
import os
3231
from typing import Optional, Any
3332
from .managed_agents import AnthropicManagedAgent, ManagedConfig
3433

@@ -64,11 +63,30 @@ def __init__(
6463
**kwargs,
6564
):
6665
if provider != "anthropic":
66+
# Provide differentiated guidance based on provider type
67+
_llm_hints = {"openai", "gemini", "ollama", "local"}
68+
_compute_hints = {"e2b", "modal", "flyio", "daytona", "docker"}
69+
70+
if provider in _llm_hints:
71+
hint = (
72+
f"For local agent loops with this LLM, use: "
73+
f"LocalAgent(config=LocalAgentConfig(model='...')) "
74+
f"(e.g. 'gpt-4o-mini', 'gemini/gemini-2.0-flash', 'ollama/llama3')."
75+
)
76+
elif provider in _compute_hints:
77+
hint = (
78+
f"For local execution with cloud compute, use: "
79+
f"LocalAgent(compute='{provider}', config=LocalAgentConfig(...))"
80+
)
81+
else:
82+
hint = (
83+
"Use LocalAgent(config=LocalAgentConfig(model='...')) for local loops, "
84+
"or LocalAgent(compute='e2b'|'modal'|'docker'|...) for cloud-sandboxed tools."
85+
)
86+
6787
raise ValueError(
6888
f"Managed runtime for provider '{provider}' is not yet available. "
69-
f"Currently supported: 'anthropic'. "
70-
f"For local execution with cloud compute, use: "
71-
f"LocalAgent(compute='{provider}', config=LocalAgentConfig(...))"
89+
f"Currently supported: 'anthropic'. {hint}"
7290
)
7391

7492
# Pass through to the existing Anthropic implementation

src/praisonai/praisonai/integrations/local_agent.py

Lines changed: 6 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -67,6 +67,7 @@ def __init__(
6767
**kwargs,
6868
):
6969
# Reject the provider= overload pattern to force clean usage
70+
provider_for_routing = "local" # Default provider for model routing
7071
if 'provider' in kwargs:
7172
provider_value = kwargs.pop('provider')
7273
warnings.warn(
@@ -77,6 +78,9 @@ def __init__(
7778
DeprecationWarning,
7879
stacklevel=2
7980
)
81+
# Preserve the provider value for LLM routing to maintain backward compatibility
82+
# This ensures _resolve_model() can still apply proper prefixes (ollama/, gemini/, etc.)
83+
provider_for_routing = provider_value
8084

81-
# Pass compute= as the compute parameter to the underlying implementation
82-
super().__init__(compute=compute, config=config, **kwargs)
85+
# Pass compute= as the compute parameter and provider for LLM routing
86+
super().__init__(compute=compute, config=config, provider=provider_for_routing, **kwargs)

src/praisonai/praisonai/integrations/managed_agents.py

Lines changed: 19 additions & 13 deletions
Original file line number | Diff line number | Diff line change
@@ -1097,7 +1097,9 @@ def ManagedAgent(
10971097
Raises:
10981098
ValueError: For compute-provider names that should use LocalAgent(compute=).
10991099
"""
1100-
if provider is None:
1100+
# Track if provider was auto-detected to avoid spurious deprecation warnings
1101+
auto_detected = provider is None
1102+
if auto_detected:
11011103
# Auto-detect
11021104
if os.getenv("ANTHROPIC_API_KEY") or os.getenv("CLAUDE_API_KEY"):
11031105
provider = "anthropic"
@@ -1108,24 +1110,28 @@ def ManagedAgent(
11081110
if provider == "anthropic":
11091111
return AnthropicManagedAgent(provider=provider, **kwargs)
11101112

1111-
# Compute provider names should use LocalAgent(compute=)
1113+
# Compute provider names - maintain backward compatibility by passing to LocalManagedAgent
11121114
elif provider in {"e2b", "modal", "flyio", "daytona", "docker"}:
1113-
raise ValueError(
1114-
f"Cloud compute provider '{provider}' belongs on LocalAgent(compute='{provider}', ...). "
1115-
f"Hosted runtimes for these providers are not yet available. "
1116-
f"Use: LocalAgent(compute='{provider}', config=LocalAgentConfig(model='gpt-4o-mini'))"
1117-
)
1118-
1119-
# LLM routing hints (deprecated usage)
1120-
elif provider in {"openai", "gemini", "ollama", "local"}:
11211115
warnings.warn(
1122-
f"ManagedAgent(provider='{provider}') is deprecated. "
1123-
f"Use LocalAgent directly with model= instead: "
1124-
f"LocalAgent(config=LocalAgentConfig(model='gpt-4o-mini'))",
1116+
f"ManagedAgent(provider='{provider}') for compute providers is deprecated. "
1117+
f"Use LocalAgent(compute='{provider}', config=LocalAgentConfig(...)) instead.",
11251118
DeprecationWarning,
11261119
stacklevel=2
11271120
)
11281121
from .managed_local import LocalManagedAgent
1122+
return LocalManagedAgent(provider="local", compute=provider, **kwargs)
1123+
1124+
# LLM routing hints (deprecated usage) - only warn if explicitly passed by user
1125+
elif provider in {"openai", "gemini", "ollama", "local"}:
1126+
if not auto_detected:
1127+
warnings.warn(
1128+
f"ManagedAgent(provider='{provider}') is deprecated. "
1129+
f"Use LocalAgent directly with model= instead: "
1130+
f"LocalAgent(config=LocalAgentConfig(model='gpt-4o-mini'))",
1131+
DeprecationWarning,
1132+
stacklevel=2
1133+
)
1134+
from .managed_local import LocalManagedAgent
11291135
return LocalManagedAgent(provider=provider, **kwargs)
11301136

11311137
# Unknown provider

tests/unit/integrations/test_backend_semantics.py

Lines changed: 10 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -68,10 +68,14 @@ def test_local_agent_rejects_provider_overload():
6868
with warnings.catch_warnings(record=True) as w:
6969
warnings.simplefilter("always")
7070
LocalAgent(provider="openai", config=LocalAgentConfig(model="gpt-4o-mini"))
71-
assert len(w) == 1
72-
assert issubclass(w[0].category, DeprecationWarning)
73-
assert "provider=" in str(w[0].message)
74-
assert "config.model=" in str(w[0].message)
71+
# Filter to only DeprecationWarning containing provider= to avoid false positives
72+
dep_warnings = [
73+
rec for rec in w
74+
if issubclass(rec.category, DeprecationWarning)
75+
and "provider=" in str(rec.message)
76+
and "config.model=" in str(rec.message)
77+
]
78+
assert len(dep_warnings) == 1, f"Expected 1 provider= deprecation warning, got {len(dep_warnings)} from {len(w)} total warnings"
7579

7680

7781
def test_managed_agent_deprecation_warnings():
@@ -171,10 +175,10 @@ def test_config_aliases():
171175
from praisonai.integrations.managed_local import LocalManagedConfig
172176

173177
# HostedAgentConfig should alias ManagedConfig
174-
assert HostedAgentConfig == ManagedConfig
178+
assert HostedAgentConfig is ManagedConfig
175179

176180
# LocalAgentConfig should alias LocalManagedConfig
177-
assert LocalAgentConfig == LocalManagedConfig
181+
assert LocalAgentConfig is LocalManagedConfig
178182

179183

180184
def test_unknown_provider_error():

0 commit comments

Comments (0)