Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
118 changes: 110 additions & 8 deletions src/strands/experimental/agent_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,39 @@
agent = config_to_agent("config.json")
# Add tools that need code-based instantiation
agent.tool_registry.process_tools([ToolWithConfigArg(HttpsConnection("localhost"))])

The ``model`` field supports two formats:

**String format (backward compatible — defaults to Bedrock):**
{"model": "us.anthropic.claude-sonnet-4-20250514-v1:0"}

**Object format (supports all providers):**
{
"model": {
"provider": "anthropic",
"model_id": "claude-sonnet-4-20250514",
"max_tokens": 10000,
"client_args": {"api_key": "..."}
}
}

Note: The following constructor parameters cannot be specified from JSON because they require
code-based instantiation: ``boto_session`` (Bedrock, SageMaker), ``client`` (OpenAI, Gemini),
``gemini_tools`` (Gemini). Use ``region_name`` / ``client_args`` as JSON-friendly alternatives.
"""

from __future__ import annotations

import json
from pathlib import Path
from typing import Any
from typing import TYPE_CHECKING, Any

import jsonschema
from jsonschema import ValidationError

if TYPE_CHECKING:
from ..models.model import Model

# JSON Schema for agent configuration
AGENT_CONFIG_SCHEMA = {
"$schema": "http://json-schema.org/draft-07/schema#",
Expand All @@ -27,8 +51,25 @@
"properties": {
"name": {"description": "Name of the agent", "type": ["string", "null"], "default": None},
"model": {
"description": "The model ID to use for this agent. If not specified, uses the default model.",
"type": ["string", "null"],
"description": (
"The model to use for this agent. Can be a string (Bedrock model_id) "
"or an object with a 'provider' field for any supported provider."
),
"oneOf": [
{"type": "string"},
{"type": "null"},
{
"type": "object",
"properties": {
"provider": {
"description": "The model provider name",
"type": "string",
}
},
"required": ["provider"],
"additionalProperties": True,
},
],
"default": None,
},
"prompt": {
Expand All @@ -50,8 +91,55 @@
# Pre-compile validator for better performance
_VALIDATOR = jsonschema.Draft7Validator(AGENT_CONFIG_SCHEMA)

# Maps a JSON-friendly provider name to the model class name exported by
# strands.models (resolved lazily via that package's __getattr__ so optional
# provider dependencies are only imported when actually requested).
PROVIDER_MAP: dict[str, str] = {
    "anthropic": "AnthropicModel",
    "bedrock": "BedrockModel",
    "gemini": "GeminiModel",
    "litellm": "LiteLLMModel",
    "llamaapi": "LlamaAPIModel",
    "llamacpp": "LlamaCppModel",
    "mistral": "MistralModel",
    "ollama": "OllamaModel",
    "openai": "OpenAIModel",
    "openai_responses": "OpenAIResponsesModel",
    "sagemaker": "SageMakerAIModel",
    "writer": "WriterModel",
}


def config_to_agent(config: str | dict[str, Any], **kwargs: dict[str, Any]) -> Any:
def _create_model_from_dict(model_config: dict[str, Any]) -> Model:
    """Create a Model instance from a provider config dict.

    Looks up the model class named by the ``provider`` key in ``PROVIDER_MAP`` and
    hands the remaining keys to that class's ``from_dict`` constructor. The model
    package is imported lazily so optional provider dependencies are only required
    when their provider is actually used.

    Args:
        model_config: Dict containing at least a ``provider`` key and provider-specific
            params. The caller's dict is not modified.

    Returns:
        A configured Model instance for the specified provider.

    Raises:
        ValueError: If the provider name is not recognized.
        ImportError: If the provider's optional dependencies are not installed.
    """
    remaining = dict(model_config)
    provider = remaining.pop("provider")

    class_name = PROVIDER_MAP.get(provider)
    if class_name is None:
        supported = ", ".join(sorted(PROVIDER_MAP))
        raise ValueError(f"Unknown model provider: '{provider}'. Supported providers: {supported}")

    # Deferred import: strands.models resolves model classes via lazy __getattr__,
    # so importing it here avoids pulling in every provider at module load time.
    from .. import models

    model_cls: type[Model] = getattr(models, class_name)
    return model_cls.from_dict(remaining)


def config_to_agent(config: str | dict[str, Any], **kwargs: Any) -> Any:
"""Create an Agent from a configuration file or dictionary.

This function supports tools that can be loaded declaratively (file paths, module names,
Expand Down Expand Up @@ -83,6 +171,12 @@ def config_to_agent(config: str | dict[str, Any], **kwargs: dict[str, Any]) -> A
Create agent from dictionary:
>>> config = {"model": "anthropic.claude-3-5-sonnet-20241022-v2:0", "tools": ["calculator"]}
>>> agent = config_to_agent(config)

Create agent with object model config:
>>> config = {
... "model": {"provider": "openai", "model_id": "gpt-4o", "client_args": {"api_key": "..."}}
... }
>>> agent = config_to_agent(config)
"""
# Parse configuration
if isinstance(config, str):
Expand Down Expand Up @@ -114,11 +208,19 @@ def config_to_agent(config: str | dict[str, Any], **kwargs: dict[str, Any]) -> A
raise ValueError(f"Configuration validation error at {error_path}: {e.message}") from e

# Prepare Agent constructor arguments
agent_kwargs = {}

# Map configuration keys to Agent constructor parameters
agent_kwargs: dict[str, Any] = {}

# Handle model field — string vs object format
model_value = config_dict.get("model")
if isinstance(model_value, dict):
# Object format: create Model instance via factory
agent_kwargs["model"] = _create_model_from_dict(model_value)
elif model_value is not None:
# String format (backward compat): pass directly as model_id to Agent
agent_kwargs["model"] = model_value

# Map remaining configuration keys to Agent constructor parameters
config_mapping = {
"model": "model",
"prompt": "system_prompt",
"tools": "tools",
"name": "name",
Expand Down
28 changes: 28 additions & 0 deletions src/strands/models/bedrock.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,34 @@ class BedrockConfig(TypedDict, total=False):
temperature: float | None
top_p: float | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "BedrockModel":
    """Create a BedrockModel from a configuration dictionary.

    ``region_name`` and ``endpoint_url`` are forwarded as separate constructor
    parameters, and a ``boto_client_config`` given as a plain dict is converted
    to a ``botocore.config.Config`` instance.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured BedrockModel instance.
    """
    remaining = dict(config)
    kwargs: dict[str, Any] = {
        key: remaining.pop(key) for key in ("region_name", "endpoint_url") if key in remaining
    }

    if "boto_client_config" in remaining:
        raw = remaining.pop("boto_client_config")
        if isinstance(raw, dict):
            raw = BotocoreConfig(**raw)
        # A pre-built Config instance is passed through untouched.
        kwargs["boto_client_config"] = raw

    kwargs.update(remaining)
    return cls(**kwargs)

def __init__(
self,
*,
Expand Down
22 changes: 22 additions & 0 deletions src/strands/models/llamacpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,28 @@ class LlamaCppConfig(TypedDict, total=False):
model_id: str
params: dict[str, Any] | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "LlamaCppModel":
    """Create a LlamaCppModel from a configuration dictionary.

    ``base_url`` and ``timeout`` are forwarded as separate constructor parameters;
    everything else is passed through as-is.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured LlamaCppModel instance.
    """
    remaining = dict(config)
    kwargs: dict[str, Any] = {
        key: remaining.pop(key) for key in ("base_url", "timeout") if key in remaining
    }
    kwargs.update(remaining)
    return cls(**kwargs)

def __init__(
self,
base_url: str = "http://localhost:8080",
Expand Down
24 changes: 24 additions & 0 deletions src/strands/models/mistral.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,30 @@ class MistralConfig(TypedDict, total=False):
top_p: float | None
stream: bool | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "MistralModel":
    """Create a MistralModel from a configuration dictionary.

    ``api_key`` and ``client_args`` are forwarded as separate constructor
    parameters; keys whose value is None are omitted so the constructor
    defaults apply.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured MistralModel instance.
    """
    remaining = dict(config)
    kwargs: dict[str, Any] = {}
    for key in ("api_key", "client_args"):
        value = remaining.pop(key, None)
        if value is not None:
            kwargs[key] = value
    kwargs.update(remaining)
    return cls(**kwargs)

def __init__(
self,
api_key: str | None = None,
Expand Down
25 changes: 25 additions & 0 deletions src/strands/models/model.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
"""Abstract base class for Agent model providers."""

from __future__ import annotations

import abc
import logging
from collections.abc import AsyncGenerator, AsyncIterable
Expand Down Expand Up @@ -51,6 +53,29 @@ def stateful(self) -> bool:
"""
return False

@classmethod
def from_dict(cls, config: dict[str, Any]) -> Model:
    """Create a Model instance from a configuration dictionary.

    The default implementation extracts ``client_args`` (if present and not None)
    and passes all remaining keys as keyword arguments to the constructor.
    Subclasses with non-standard constructor signatures should override this method.

    Args:
        config: Provider-specific configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured Model instance.
    """
    remaining = dict(config)
    client_args = remaining.pop("client_args", None)
    # An explicit null client_args is dropped so the constructor default applies.
    kwargs: dict[str, Any] = {} if client_args is None else {"client_args": client_args}
    kwargs.update(remaining)
    return cls(**kwargs)

@abc.abstractmethod
# pragma: no cover
def update_config(self, **model_config: Any) -> None:
Expand Down
23 changes: 23 additions & 0 deletions src/strands/models/ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,29 @@ class OllamaConfig(TypedDict, total=False):
temperature: float | None
top_p: float | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "OllamaModel":
    """Create an OllamaModel from a configuration dictionary.

    ``host`` is passed positionally (None when absent), and ``client_args`` is
    mapped to the ``ollama_client_args`` constructor parameter.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured OllamaModel instance.
    """
    remaining = dict(config)
    host = remaining.pop("host", None)
    extra_client_args = remaining.pop("client_args", None)
    kwargs: dict[str, Any] = (
        {"ollama_client_args": extra_client_args} if extra_client_args is not None else {}
    )
    kwargs.update(remaining)
    return cls(host, **kwargs)

def __init__(
self,
host: str | None,
Expand Down
26 changes: 26 additions & 0 deletions src/strands/models/sagemaker.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,32 @@ class SageMakerAIEndpointConfig(TypedDict, total=False):
target_variant: str | None | None
additional_args: dict[str, Any] | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "SageMakerAIModel":
    """Create a SageMakerAIModel from a configuration dictionary.

    ``endpoint_config`` and ``payload_config`` are forwarded as separate
    constructor parameters (defaulting to empty dicts), and a
    ``boto_client_config`` given as a plain dict is converted to a
    ``botocore.config.Config``. Any other key is rejected.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured SageMakerAIModel instance.

    Raises:
        ValueError: If the dictionary contains keys this provider does not accept.
    """
    remaining = dict(config)
    kwargs: dict[str, Any] = {
        "endpoint_config": remaining.pop("endpoint_config", {}),
        "payload_config": remaining.pop("payload_config", {}),
    }
    if "boto_client_config" in remaining:
        raw = remaining.pop("boto_client_config")
        if isinstance(raw, dict):
            raw = BotocoreConfig(**raw)
        kwargs["boto_client_config"] = raw
    # Unlike the other providers, leftover keys are an error rather than being
    # forwarded, since the constructor takes no free-form keyword arguments.
    if remaining:
        unexpected = ", ".join(sorted(remaining))
        raise ValueError(f"Unsupported SageMaker config keys: {unexpected}")
    return cls(**kwargs)

def __init__(
self,
endpoint_config: SageMakerAIEndpointConfig,
Expand Down
Loading
Loading