diff --git a/.env.example b/.env.example index e4fbd30..6397f84 100644 --- a/.env.example +++ b/.env.example @@ -50,3 +50,13 @@ SGR__PROMPTS__CLARIFICATION_RESPONSE_FILE=path/to/your/clarification_response.tx # ======================================================= # Note: MCP configuration is complex and better suited for config.yaml # See config.yaml.example for MCP server configuration examples + +# ======================================================= +# Observability: Langfuse integration +# ======================================================= +SGR__LANGFUSE__ENABLED=false +# SGR__LANGFUSE__PUBLIC_KEY=pk-lf-xxx +# SGR__LANGFUSE__SECRET_KEY=sk-lf-xxx +# SGR__LANGFUSE__HOST=http://localhost:3000 + +# Shorthand (credentials via LANGFUSE_* env vars): SGR__LANGFUSE=true diff --git a/config.yaml.example b/config.yaml.example index 3fd67fe..bae3248 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -10,6 +10,15 @@ llm: temperature: 0.4 # Temperature (0.0-1.0) # proxy: "socks5://127.0.0.1:1081" # Optional proxy (socks5:// or http://) +# Observability (Langfuse) +# When enabled, AgentFactory will create Langfuse AsyncOpenAI client instead of standard AsyncOpenAI. +# Credentials can be set here or via LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY / LANGFUSE_HOST env vars. +langfuse: + enabled: false + # public_key: "pk-lf-xxx" + # secret_key: "sk-lf-xxx" + # host: "http://localhost:3000" + # Execution Settings execution: max_clarifications: 3 # Max clarification requests diff --git a/docs/en/framework/configuration.md b/docs/en/framework/configuration.md index 3b67c26..0f657ce 100644 --- a/docs/en/framework/configuration.md +++ b/docs/en/framework/configuration.md @@ -67,6 +67,27 @@ config = GlobalConfig.from_yaml("config.yaml") An example can be found in [`config.yaml.example`](https://github.com/vamplabAI/sgr-agent-core/blob/main/config.yaml.example). +### Observability and Langfuse integration + +SGR Agent Core supports optional [Langfuse](https://langfuse.com) integration for LLM tracing: + +```yaml +langfuse: + enabled: true + public_key: "pk-lf-..." + secret_key: "sk-lf-..." + host: "https://cloud.langfuse.com" # or your self-hosted URL +``` + +Shorthand (when credentials are already in `LANGFUSE_*` env vars): + +```yaml +langfuse: true +``` + +See the [Langfuse integration guide](langfuse.md) for all connection scenarios +(Langfuse Cloud, self-hosted, LiteLLM proxy), environment variable reference, and troubleshooting. + ### Parameter Override **Key Feature:** `AgentDefinition` inherits all parameters from `GlobalConfig` and overrides only those explicitly specified. This allows creating minimal configurations by specifying only necessary changes. diff --git a/docs/en/framework/langfuse.md b/docs/en/framework/langfuse.md new file mode 100644 index 0000000..1a9c5e2 --- /dev/null +++ b/docs/en/framework/langfuse.md @@ -0,0 +1,176 @@ +# Langfuse Integration + +[Langfuse](https://langfuse.com) is an open-source observability platform for LLM applications. +When enabled, SGR Agent Core wraps the OpenAI client with Langfuse tracing — every LLM call +is automatically recorded as a trace with inputs, outputs, latency, and token usage. + +## Quick Start + +Add the `langfuse` block to your `config.yaml`: + +```yaml +langfuse: + enabled: true + public_key: "pk-lf-..." + secret_key: "sk-lf-..." + host: "https://cloud.langfuse.com" # or your self-hosted URL +``` + +That's it. On the next agent run you will see traces appearing in the Langfuse UI. 
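+
+If you use SGR Agent Core as a library rather than the `sgr` CLI server, the same configuration
+is picked up when you build `GlobalConfig` yourself. A minimal sketch (assumes credentials may
+live in `.env`; see the environment variables section below):
+
+```python
+from dotenv import load_dotenv
+
+from sgr_agent_core.agent_config import GlobalConfig
+
+load_dotenv()  # LANGFUSE_* / SGR__LANGFUSE__* are not loaded automatically in library mode
+
+config = GlobalConfig.from_yaml("config.yaml")  # config.yaml contains the langfuse block above
+print(config.langfuse.enabled)                  # True when the integration is active
+```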
+ +--- + +## Connection Scenarios + +### Option 1: Langfuse Cloud + +The simplest option — use the managed service at [cloud.langfuse.com](https://cloud.langfuse.com). + +1. Sign up at [cloud.langfuse.com](https://cloud.langfuse.com) and create a project. +2. Copy **Public Key** and **Secret Key** from *Project Settings → API Keys*. +3. Add to `config.yaml`: + +```yaml +langfuse: + enabled: true + public_key: "pk-lf-..." + secret_key: "sk-lf-..." + host: "https://cloud.langfuse.com" +``` + +!!! note + `host` defaults to `https://cloud.langfuse.com` in the Langfuse SDK, so you can omit it + for the cloud deployment. It is shown here explicitly for clarity. + +--- + +### Option 2: Self-Hosted Langfuse + +Run Langfuse on your own infrastructure using the official Docker Compose setup. + +1. Follow the [self-hosting guide](https://langfuse.com/docs/deployment/self-host) to start Langfuse locally: + +```bash +git clone https://github.com/langfuse/langfuse.git +cd langfuse +docker compose up -d +``` + +Langfuse will be available at `http://localhost:3000` by default. + +2. Open `http://localhost:3000`, create a project, and copy the API keys. + +3. Point SGR at your instance: + +```yaml +langfuse: + enabled: true + public_key: "pk-lf-..." + secret_key: "sk-lf-..." + host: "http://localhost:3000" +``` + +!!! tip + For production deployments, replace `localhost` with the hostname or IP of your Langfuse server + (e.g. `https://langfuse.internal.example.com`). + +--- + +### Option 3: Via LiteLLM Proxy + +[LiteLLM](https://docs.litellm.ai/) can act as a unified proxy in front of multiple LLM providers +and forward traces to Langfuse automatically. + +**Request flow:** + +``` +SGR Agent → LiteLLM Proxy → LLM Provider (OpenAI, etc.) + ↓ + Langfuse +``` + +1. Configure LiteLLM to forward traces to Langfuse. + In your LiteLLM `config.yaml`, add the Langfuse callback: + +```yaml +# litellm/config.yaml +litellm_settings: + success_callback: ["langfuse"] + +environment_variables: + LANGFUSE_PUBLIC_KEY: "pk-lf-..." + LANGFUSE_SECRET_KEY: "sk-lf-..." + LANGFUSE_HOST: "https://cloud.langfuse.com" +``` + +2. In SGR's `config.yaml`, point the LLM base URL at your LiteLLM proxy and **disable** the + SGR-level Langfuse integration (LiteLLM handles tracing itself): + +```yaml +llm: + api_key: "your-litellm-api-key" + base_url: "http://localhost:4000" # LiteLLM proxy address + model: "gpt-4o" + +langfuse: + enabled: false # LiteLLM handles tracing +``` + +!!! note + Alternatively, you can enable Langfuse in SGR **and** in LiteLLM at the same time — + you will get two levels of tracing (SGR-side LLM calls + LiteLLM-side routing). + In most cases, one level is sufficient. + +--- + +## Environment Variables + +All `langfuse` config fields can be set via environment variables using the `SGR__LANGFUSE__*` +prefix: + +```bash +SGR__LANGFUSE__ENABLED=true +SGR__LANGFUSE__PUBLIC_KEY=pk-lf-xxx +SGR__LANGFUSE__SECRET_KEY=sk-lf-xxx +SGR__LANGFUSE__HOST=http://localhost:3000 +``` + +Alternatively, when credentials are already in native Langfuse environment variables, use the +shorthand to enable SGR integration without duplicating credentials: + +```bash +# These are read directly by the Langfuse SDK +LANGFUSE_PUBLIC_KEY=pk-lf-xxx +LANGFUSE_SECRET_KEY=sk-lf-xxx +LANGFUSE_HOST=http://localhost:3000 +``` + +```yaml +# config.yaml — shorthand, credentials come from LANGFUSE_* env vars +langfuse: true +``` + +!!! warning "Loading `.env` files" + Environment variables in `.env` are **not** loaded automatically by Python. 
+ SGR's server (`sgr` CLI) loads `.env` on startup via `python-dotenv`. + If you run SGR as a library, call `load_dotenv()` yourself before initializing `GlobalConfig`. + +--- + +## Troubleshooting + +### `LangfuseImportError` / cannot import `langfuse` + +If `langfuse.enabled` is `true` in configuration but the `langfuse` package is not installed +or cannot be imported, agent startup raises `LangfuseImportError` with a clear message. +Install the project dependencies (`langfuse` is a core dependency of SGR Agent Core) or +disable Langfuse by setting `langfuse.enabled` to `false`. + +### "Authentication error: Langfuse client initialized without public_key" + +The Langfuse SDK cannot find credentials. Check the following: + +- `public_key` and `secret_key` are set in `config.yaml` under `langfuse:`, **or** + `LANGFUSE_PUBLIC_KEY` / `LANGFUSE_SECRET_KEY` are present in the environment. +- If using `.env`, make sure the server was started via the `sgr` CLI (which loads `.env`) + rather than called directly as a Python module. diff --git a/docs/ru/framework/configuration.md b/docs/ru/framework/configuration.md index 06362a7..24d23d5 100644 --- a/docs/ru/framework/configuration.md +++ b/docs/ru/framework/configuration.md @@ -68,6 +68,26 @@ config = GlobalConfig.from_yaml("config.yaml") Пример можно найти в [`config.yaml.example`](https://github.com/vamplabAI/sgr-agent-core/blob/main/config.yaml.example). +### Наблюдаемость и интеграция с Langfuse + +SGR Agent Core поддерживает опциональную интеграцию с [Langfuse](https://langfuse.com) для трассировки LLM-вызовов: + +```yaml +langfuse: + enabled: true + public_key: "pk-lf-..." + secret_key: "sk-lf-..." + host: "https://cloud.langfuse.com" # или ваш self-hosted URL +``` + +Сокращённая форма (когда ключи уже заданы в `LANGFUSE_*` env): + +```yaml +langfuse: true +``` + +Подробнее о всех сценариях подключения (Langfuse Cloud, self-hosted, LiteLLM proxy), +переменных окружения и решении проблем — в [руководстве по интеграции с Langfuse](langfuse.md). ### Переопределение параметров diff --git a/docs/ru/framework/langfuse.md b/docs/ru/framework/langfuse.md new file mode 100644 index 0000000..f6f34df --- /dev/null +++ b/docs/ru/framework/langfuse.md @@ -0,0 +1,178 @@ +# Интеграция с Langfuse + +[Langfuse](https://langfuse.com) — open-source платформа для наблюдаемости LLM-приложений. +При включении SGR Agent Core оборачивает OpenAI-клиент трассировкой Langfuse — каждый вызов +LLM автоматически записывается как трейс с входными данными, ответом, латентностью и +количеством токенов. + +## Быстрый старт + +Добавьте блок `langfuse` в ваш `config.yaml`: + +```yaml +langfuse: + enabled: true + public_key: "pk-lf-..." + secret_key: "sk-lf-..." + host: "https://cloud.langfuse.com" # или ваш self-hosted URL +``` + +Готово. При следующем запуске агента трейсы появятся в интерфейсе Langfuse. + +--- + +## Сценарии подключения + +### Вариант 1: Langfuse Cloud + +Самый простой вариант — использовать облачный сервис [cloud.langfuse.com](https://cloud.langfuse.com). + +1. Зарегистрируйтесь на [cloud.langfuse.com](https://cloud.langfuse.com) и создайте проект. +2. Скопируйте **Public Key** и **Secret Key** из *Project Settings → API Keys*. +3. Добавьте в `config.yaml`: + +```yaml +langfuse: + enabled: true + public_key: "pk-lf-..." + secret_key: "sk-lf-..." + host: "https://cloud.langfuse.com" +``` + +!!! note + По умолчанию Langfuse SDK использует `https://cloud.langfuse.com`, поэтому для облачного + варианта `host` можно опустить. 
Здесь он указан явно для наглядности. + +--- + +### Вариант 2: Self-Hosted Langfuse + +Запустите Langfuse на собственной инфраструктуре с помощью официального Docker Compose. + +1. Следуйте [инструкции по self-hosting](https://langfuse.com/docs/deployment/self-host) + для запуска Langfuse локально: + +```bash +git clone https://github.com/langfuse/langfuse.git +cd langfuse +docker compose up -d +``` + +По умолчанию Langfuse будет доступен по адресу `http://localhost:3000`. + +2. Откройте `http://localhost:3000`, создайте проект и скопируйте API-ключи. + +3. Укажите адрес вашего инстанса в SGR: + +```yaml +langfuse: + enabled: true + public_key: "pk-lf-..." + secret_key: "sk-lf-..." + host: "http://localhost:3000" +``` + +!!! tip + Для production-развёртываний замените `localhost` на hostname или IP вашего сервера Langfuse + (например, `https://langfuse.internal.example.com`). + +--- + +### Вариант 3: Через LiteLLM Proxy + +[LiteLLM](https://docs.litellm.ai/) может выступать единым прокси перед несколькими LLM-провайдерами +и автоматически форвардить трейсы в Langfuse. + +**Поток запросов:** + +``` +SGR Agent → LiteLLM Proxy → LLM-провайдер (OpenAI и др.) + ↓ + Langfuse +``` + +1. Настройте LiteLLM для передачи трейсов в Langfuse. + В `config.yaml` LiteLLM добавьте callback: + +```yaml +# litellm/config.yaml +litellm_settings: + success_callback: ["langfuse"] + +environment_variables: + LANGFUSE_PUBLIC_KEY: "pk-lf-..." + LANGFUSE_SECRET_KEY: "sk-lf-..." + LANGFUSE_HOST: "https://cloud.langfuse.com" +``` + +2. В `config.yaml` SGR укажите base URL на ваш LiteLLM-прокси и **отключите** интеграцию + Langfuse на уровне SGR (трассировкой занимается LiteLLM): + +```yaml +llm: + api_key: "your-litellm-api-key" + base_url: "http://localhost:4000" # адрес LiteLLM proxy + model: "gpt-4o" + +langfuse: + enabled: false # LiteLLM берёт трассировку на себя +``` + +!!! note + При желании можно включить Langfuse и в SGR, и в LiteLLM одновременно — вы получите + два уровня трейсов (LLM-вызовы на стороне SGR + маршрутизация на стороне LiteLLM). + В большинстве случаев достаточно одного уровня. + +--- + +## Переменные окружения + +Все параметры блока `langfuse` можно задать через переменные окружения с префиксом `SGR__LANGFUSE__`: + +```bash +SGR__LANGFUSE__ENABLED=true +SGR__LANGFUSE__PUBLIC_KEY=pk-lf-xxx +SGR__LANGFUSE__SECRET_KEY=sk-lf-xxx +SGR__LANGFUSE__HOST=http://localhost:3000 +``` + +Если ключи уже заданы в нативных переменных Langfuse, используйте сокращённую форму — без +дублирования ключей: + +```bash +# Эти переменные читаются непосредственно Langfuse SDK +LANGFUSE_PUBLIC_KEY=pk-lf-xxx +LANGFUSE_SECRET_KEY=sk-lf-xxx +LANGFUSE_HOST=http://localhost:3000 +``` + +```yaml +# config.yaml — сокращённая форма, ключи из LANGFUSE_* env +langfuse: true +``` + +!!! warning "Загрузка `.env`-файлов" + Python **не** загружает `.env` автоматически. + Сервер SGR (CLI `sgr`) загружает `.env` при старте через `python-dotenv`. + Если вы используете SGR как библиотеку, вызовите `load_dotenv()` самостоятельно + до инициализации `GlobalConfig`. + +--- + +## Решение проблем + +### `LangfuseImportError` / не удаётся импортировать `langfuse` + +Если в конфигурации `langfuse.enabled: true`, но пакет `langfuse` не установлен или недоступен +для импорта, при старте агента выбрасывается `LangfuseImportError` с понятным текстом. +Установите зависимости проекта (`langfuse` входит в основные зависимости SGR Agent Core) либо +отключите Langfuse, установив `langfuse.enabled` в `false`. 
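+
+Например, оба варианта в виде команд (набросок; версия пакета и имя переменной взяты из
+`pyproject.toml` и `.env.example` этого репозитория):
+
+```bash
+# доустановить пакет langfuse той же версии, что указана в pyproject.toml
+pip install "langfuse>=4.0.0"
+
+# либо временно выключить интеграцию через переменную окружения
+export SGR__LANGFUSE__ENABLED=false
+```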
+ +### "Authentication error: Langfuse client initialized without public_key" + +Langfuse SDK не может найти ключи. Проверьте следующее: + +- `public_key` и `secret_key` указаны в `config.yaml` в блоке `langfuse:`, **либо** + `LANGFUSE_PUBLIC_KEY` / `LANGFUSE_SECRET_KEY` присутствуют в окружении. +- Если используете `.env`, убедитесь что сервер запущен через CLI `sgr` (он загружает `.env`), + а не напрямую как Python-модуль. diff --git a/mkdocs.yml b/mkdocs.yml index e4c386e..df650c7 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -30,6 +30,7 @@ plugins: Using as Library: Использование как библиотека API Server Quick Start: Быстрый старт API сервера Configuration: Конфигурация + Langfuse: Langfuse Build your agent: Собери своего агента Workflow: Рабочий процесс Demonstration: Демонстрация @@ -83,6 +84,7 @@ nav: - User Guide: - Using as Library: framework/first-steps.md - Configuration: framework/configuration.md + - Langfuse: framework/langfuse.md - Build your agent: framework/agents.md - Tools: framework/tools.md - QnA: framework/qna.md diff --git a/pyproject.toml b/pyproject.toml index 28108bd..5f78319 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,8 @@ dependencies = [ "jambo>=0.1.3.post2", # Tools filtering "rank-bm25>=0.2.2", + # Observability + "langfuse>=4.0.0", ] [project.urls] diff --git a/sgr_agent_core/agent_definition.py b/sgr_agent_core/agent_definition.py index 4397182..a3659a5 100644 --- a/sgr_agent_core/agent_definition.py +++ b/sgr_agent_core/agent_definition.py @@ -144,6 +144,33 @@ class ExecutionConfig(BaseModel, extra="allow"): reports_dir: str = Field(default="reports", description="Directory for saving reports") +class LangfuseConfig(BaseModel): + """Langfuse observability configuration.""" + + enabled: bool = Field(default=False, description="Enable Langfuse integration") + public_key: str | None = Field(default=None, description="Langfuse public key (pk-lf-...)") + secret_key: str | None = Field(default=None, description="Langfuse secret key (sk-lf-...)") + host: str | None = Field(default=None, description="Langfuse host URL (e.g. http://localhost:3000)") + + @field_validator("public_key", "secret_key", "host", mode="before") + @classmethod + def empty_str_to_none(cls, v: Any) -> Any: + """Treat empty strings as unset so SDK kwargs omit them.""" + if v == "": + return None + return v + + def has_explicit_sdk_fields(self) -> bool: + """Return True if any credential or host is set for explicit Langfuse + SDK init.""" + return bool(self.to_langfuse_client_kwargs()) + + def to_langfuse_client_kwargs(self) -> dict[str, str]: + """Build kwargs for ``Langfuse(...)`` with only non-empty fields.""" + raw = self.model_dump(include={"public_key", "secret_key", "host"}, exclude_none=True) + return {k: v for k, v in raw.items() if isinstance(v, str) and v != ""} + + class AgentConfig(BaseModel, extra="allow"): """Agent configuration with all settings. @@ -151,11 +178,22 @@ class AgentConfig(BaseModel, extra="allow"): parameters (e.g., working_directory for file agents). 
""" + langfuse: LangfuseConfig = Field(default_factory=LangfuseConfig, description="Langfuse observability settings") llm: LLMConfig = Field(default_factory=LLMConfig, description="LLM settings") execution: ExecutionConfig = Field(default_factory=ExecutionConfig, description="Execution settings") prompts: PromptsConfig = Field(default_factory=PromptsConfig, description="Prompts settings") mcp: MCPConfig = Field(default_factory=MCPConfig, description="MCP settings") + @field_validator("langfuse", mode="before") + @classmethod + def normalize_langfuse(cls, v): + """Accept bool shorthand: langfuse: true → LangfuseConfig(enabled=True).""" + if isinstance(v, bool): + return {"enabled": v} + if isinstance(v, str): + return {"enabled": v.lower() in ("true", "1", "yes")} + return v + class ToolDefinition(BaseModel, extra="allow"): """Definition of a custom tool. diff --git a/sgr_agent_core/agent_factory.py b/sgr_agent_core/agent_factory.py index 54deb21..f46c3e5 100644 --- a/sgr_agent_core/agent_factory.py +++ b/sgr_agent_core/agent_factory.py @@ -1,6 +1,7 @@ """Agent Factory for dynamic agent creation from definitions.""" import logging +from importlib import import_module from typing import Any, Type, TypeVar import httpx @@ -19,6 +20,11 @@ Agent = TypeVar("Agent", bound=BaseAgent) +class LangfuseImportError(RuntimeError): + """Raised when Langfuse is enabled in config but the ``langfuse`` package + cannot be imported.""" + + class AgentFactory: """Factory for creating agent instances from definitions. @@ -36,10 +42,27 @@ def _create_client(cls, llm_config: LLMConfig) -> AsyncOpenAI: Returns: Configured AsyncOpenAI client """ + config = GlobalConfig() client_kwargs = {"base_url": llm_config.base_url, "api_key": llm_config.api_key} if llm_config.proxy: client_kwargs["http_client"] = httpx.AsyncClient(proxy=llm_config.proxy) + if config.langfuse.enabled: + try: + lf_cfg = config.langfuse + if lf_cfg.has_explicit_sdk_fields(): + LangfuseClient = getattr(import_module("langfuse"), "Langfuse") + LangfuseClient(**lf_cfg.to_langfuse_client_kwargs()) + logger.info("Langfuse initialized with explicit credentials from config") + LangfuseAsyncOpenAI = getattr(import_module("langfuse.openai"), "AsyncOpenAI") + logger.info("Creating Langfuse AsyncOpenAI client (langfuse.enabled=True)") + return LangfuseAsyncOpenAI(**client_kwargs) + except ImportError as exc: + raise LangfuseImportError( + "Langfuse is enabled in config but the 'langfuse' package could not be imported. " + "Install dependencies or set langfuse.enabled to false in configuration." 
+ ) from exc + return AsyncOpenAI(**client_kwargs) @classmethod diff --git a/tests/test_agent_config_integration.py b/tests/test_agent_config_integration.py index 690040d..b7b1839 100644 --- a/tests/test_agent_config_integration.py +++ b/tests/test_agent_config_integration.py @@ -13,6 +13,7 @@ import yaml from sgr_agent_core.agent_config import GlobalConfig +from sgr_agent_core.agent_definition import LangfuseConfig from sgr_agent_core.agents import SGRAgent, SGRToolCallingAgent from sgr_agent_core.server.settings import ServerConfig, setup_logging from tests.conftest import create_test_agent @@ -77,6 +78,138 @@ def test_invalid_config_values(self): assert agent.task_messages[0]["content"] == "Invalid config test" +class TestLangfuseConfiguration: + """Tests for Langfuse-related configuration flags.""" + + def test_langfuse_enabled_default_false(self, monkeypatch): + """Test that langfuse.enabled is False by default.""" + from sgr_agent_core import agent_config as agent_config_module + + agent_config_module.GlobalConfig._instance = None + agent_config_module.GlobalConfig._initialized = False + monkeypatch.delenv("SGR__LANGFUSE", raising=False) + monkeypatch.delenv("SGR__LANGFUSE__ENABLED", raising=False) + + config = GlobalConfig() + + assert config.langfuse.enabled is False + + def test_langfuse_enabled_from_env(self, monkeypatch): + """Test that langfuse.enabled can be enabled via nested env + variable.""" + from sgr_agent_core import agent_config as agent_config_module + + agent_config_module.GlobalConfig._instance = None + agent_config_module.GlobalConfig._initialized = False + + monkeypatch.setenv("SGR__LANGFUSE__ENABLED", "true") + + config = GlobalConfig() + + assert config.langfuse.enabled is True + + def test_langfuse_enabled_from_env_shorthand(self, monkeypatch): + """Test backward-compat: SGR__LANGFUSE=true still works.""" + from sgr_agent_core import agent_config as agent_config_module + + agent_config_module.GlobalConfig._instance = None + agent_config_module.GlobalConfig._initialized = False + monkeypatch.delenv("SGR__LANGFUSE__ENABLED", raising=False) + + monkeypatch.setenv("SGR__LANGFUSE", "true") + + config = GlobalConfig() + + assert config.langfuse.enabled is True + + def test_langfuse_enabled_from_yaml(self, tmp_path, monkeypatch): + """Test that langfuse.enabled can be enabled via nested config.yaml.""" + from sgr_agent_core import agent_config as agent_config_module + + agent_config_module.GlobalConfig._instance = None + agent_config_module.GlobalConfig._initialized = False + + config_path = tmp_path / "config.yaml" + config_path.write_text( + "\n".join( + [ + "llm:", + ' api_key: "test-key"', + ' base_url: "https://api.openai.com/v1"', + "langfuse:", + " enabled: true", + ] + ), + encoding="utf-8", + ) + + config = GlobalConfig.from_yaml(str(config_path)) + + assert config.langfuse.enabled is True + + def test_langfuse_yaml_with_credentials(self, tmp_path, monkeypatch): + """Test that Langfuse credentials are parsed from config.yaml.""" + from sgr_agent_core import agent_config as agent_config_module + + agent_config_module.GlobalConfig._instance = None + agent_config_module.GlobalConfig._initialized = False + + config_path = tmp_path / "config.yaml" + config_path.write_text( + "\n".join( + [ + "llm:", + ' api_key: "test-key"', + ' base_url: "https://api.openai.com/v1"', + "langfuse:", + " enabled: true", + ' public_key: "pk-lf-test"', + ' secret_key: "sk-lf-test"', + ' host: "http://localhost:3000"', + ] + ), + encoding="utf-8", + ) + + config = 
GlobalConfig.from_yaml(str(config_path)) + + assert config.langfuse.enabled is True + assert config.langfuse.public_key == "pk-lf-test" + assert config.langfuse.secret_key == "sk-lf-test" + assert config.langfuse.host == "http://localhost:3000" + + +class TestLangfuseConfigModel: + """Unit tests for LangfuseConfig helpers.""" + + def test_has_explicit_sdk_fields_false_when_empty(self): + """No credentials means no explicit SDK init kwargs.""" + cfg = LangfuseConfig(enabled=True) + assert cfg.has_explicit_sdk_fields() is False + assert cfg.to_langfuse_client_kwargs() == {} + + def test_has_explicit_sdk_fields_true_when_any_credential(self): + """Any of public_key, secret_key, host triggers explicit init.""" + assert LangfuseConfig(public_key="pk").has_explicit_sdk_fields() is True + assert LangfuseConfig(secret_key="sk").has_explicit_sdk_fields() is True + assert LangfuseConfig(host="http://h").has_explicit_sdk_fields() is True + + def test_to_langfuse_client_kwargs_partial(self): + """Only set fields appear in kwargs.""" + cfg = LangfuseConfig(public_key="pk-x", secret_key=None, host="http://localhost") + assert cfg.to_langfuse_client_kwargs() == { + "public_key": "pk-x", + "host": "http://localhost", + } + + def test_empty_strings_normalized_to_none(self): + """Empty strings are treated as unset.""" + cfg = LangfuseConfig(public_key="", secret_key="sk") + assert cfg.public_key is None + assert cfg.has_explicit_sdk_fields() is True + assert cfg.to_langfuse_client_kwargs() == {"secret_key": "sk"} + + class TestMultipleAgentConfigurationConsistency: """Tests for configuration consistency across multiple agents.""" diff --git a/tests/test_agent_factory.py b/tests/test_agent_factory.py index 58b353c..3256bf0 100644 --- a/tests/test_agent_factory.py +++ b/tests/test_agent_factory.py @@ -13,11 +13,12 @@ from sgr_agent_core.agent_definition import ( AgentDefinition, ExecutionConfig, + LangfuseConfig, LLMConfig, PromptsConfig, ToolDefinition, ) -from sgr_agent_core.agent_factory import AgentFactory +from sgr_agent_core.agent_factory import AgentFactory, LangfuseImportError from sgr_agent_core.agents import ( DialogAgent, SGRAgent, @@ -40,6 +41,7 @@ def mock_global_config(): ) mock_config.execution = ExecutionConfig() mock_config.search = None + mock_config.langfuse = LangfuseConfig() mock_config.tools = {} # Create a mock MCP config that has model_copy and model_dump methods mock_mcp = Mock() @@ -407,6 +409,92 @@ def test_create_client_with_socks_proxy(self): assert client.api_key == "test-key" assert client._client is not None + def test_create_client_uses_langfuse_when_enabled(self, monkeypatch): + """Test that _create_client uses Langfuse AsyncOpenAI when enabled.""" + from types import SimpleNamespace + + class DummyAsyncOpenAI: + def __init__(self, **kwargs): + self.kwargs = kwargs + + mock_config = Mock() + mock_config.langfuse = LangfuseConfig(enabled=True) # no credentials + + with ( + patch("sgr_agent_core.agent_factory.GlobalConfig", return_value=mock_config), + patch( + "sgr_agent_core.agent_factory.import_module", + return_value=SimpleNamespace(AsyncOpenAI=DummyAsyncOpenAI), + ), + ): + llm_config = LLMConfig( + api_key="test-key", + base_url="https://api.openai.com/v1", + ) + client = AgentFactory._create_client(llm_config) + + assert isinstance(client, DummyAsyncOpenAI) + assert client.kwargs["api_key"] == "test-key" + assert client.kwargs["base_url"] == "https://api.openai.com/v1" + + def test_create_client_raises_when_langfuse_missing(self): + """Test that _create_client raises if 
Langfuse is enabled but package
+        is missing."""
+        mock_config = Mock()
+        mock_config.langfuse = LangfuseConfig(enabled=True)
+
+        with (
+            patch("sgr_agent_core.agent_factory.GlobalConfig", return_value=mock_config),
+            patch("sgr_agent_core.agent_factory.import_module", side_effect=ImportError),
+        ):
+            llm_config = LLMConfig(
+                api_key="test-key",
+                base_url="https://api.openai.com/v1",
+            )
+            with pytest.raises(LangfuseImportError):
+                AgentFactory._create_client(llm_config)
+
+    def test_create_client_inits_langfuse_with_credentials(self):
+        """Test that Langfuse() is initialized with explicit credentials from
+        config."""
+        from types import SimpleNamespace
+
+        class DummyAsyncOpenAI:
+            def __init__(self, **kwargs):
+                self.kwargs = kwargs
+
+        langfuse_init_kwargs = []
+
+        class DummyLangfuse:
+            def __init__(self, **kwargs):
+                self.kwargs = kwargs
+                langfuse_init_kwargs.append(kwargs)
+
+        dummy_langfuse_module = SimpleNamespace(Langfuse=DummyLangfuse)
+        dummy_openai_module = SimpleNamespace(AsyncOpenAI=DummyAsyncOpenAI)
+
+        mock_config = Mock()
+        mock_config.langfuse = LangfuseConfig(
+            enabled=True,
+            public_key="pk-test",
+            secret_key="sk-test",
+            host="http://localhost:3000",
+        )
+
+        def fake_import(name):
+            if name == "langfuse":
+                return dummy_langfuse_module
+            if name == "langfuse.openai":
+                return dummy_openai_module
+            raise ImportError(name)
+
+        with (
+            patch("sgr_agent_core.agent_factory.GlobalConfig", return_value=mock_config),
+            patch("sgr_agent_core.agent_factory.import_module", side_effect=fake_import),
+        ):
+            llm_config = LLMConfig(api_key="test-key", base_url="https://api.openai.com/v1")
+            client = AgentFactory._create_client(llm_config)
+
+        assert isinstance(client, DummyAsyncOpenAI)
+        # Verify Langfuse was initialized with explicit credentials
+        assert langfuse_init_kwargs == [
+            {
+                "public_key": "pk-test",
+                "secret_key": "sk-test",
+                "host": "http://localhost:3000",
+            }
+        ]
+
     @pytest.mark.asyncio
     async def test_stream_request_with_extra_parameters(self):
         """Test that additional parameters from LLMConfig (extra='allow') are