Skip to content

Commit 1928db8

Browse files
feat: implement Phase 1 zero-config observability with --observe langfuse flag
- Add global --observe flag to CLI with PRAISONAI_OBSERVE env var support - Wire observability automatically into Langflow Agent and Agents components - Update langfuse_example.py to use current TraceSinkProtocol API - Graceful degradation when Langfuse not installed Enables users to run: - praisonai run workflow.yaml --observe langfuse - PRAISONAI_OBSERVE=langfuse praisonai flow Fixes #1364 (Phase 1) 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
1 parent 6693a75 commit 1928db8

4 files changed

Lines changed: 98 additions & 30 deletions

File tree

Lines changed: 21 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -1,48 +1,39 @@
11
"""
2-
Langfuse Integration Example
2+
Langfuse Integration Example (Updated for TraceSinkProtocol)
33
4-
This example shows how to use Langfuse for LLM observability.
4+
This example shows how to use Langfuse for LLM observability with PraisonAI's native trace infrastructure.
55
66
Setup:
7-
1. Sign up at https://langfuse.com/
8-
2. Get your API keys from the project settings
9-
3. Set environment variables:
10-
export LANGFUSE_PUBLIC_KEY=pk-lf-xxx
11-
export LANGFUSE_SECRET_KEY=sk-lf-xxx
12-
4. Install dependencies:
13-
pip install opentelemetry-sdk opentelemetry-exporter-otlp
7+
pip install "praisonai[langfuse]"
8+
export LANGFUSE_PUBLIC_KEY=pk-lf-xxx
9+
export LANGFUSE_SECRET_KEY=sk-lf-xxx
1410
1511
Usage:
1612
python langfuse_example.py
1713
"""
18-
19-
import os
20-
from praisonai_tools.observability import obs
2114
from praisonaiagents import Agent
22-
23-
# Initialize Langfuse
24-
success = obs.init(
25-
provider="langfuse",
26-
project_name="praisonai-demo",
15+
from praisonai.observability import LangfuseSink
16+
from praisonaiagents.trace.context_events import (
17+
ContextTraceEmitter, set_context_emitter
2718
)
2819

29-
if not success:
30-
print("Failed to initialize Langfuse. Check your API keys.")
31-
print("Required: LANGFUSE_PUBLIC_KEY, LANGFUSE_SECRET_KEY")
32-
exit(1)
20+
# Initialize Langfuse observability
21+
sink = LangfuseSink()
22+
emitter = ContextTraceEmitter(sink=sink, enabled=True)
23+
set_context_emitter(emitter)
3324

34-
print("Langfuse initialized successfully!")
35-
print(f"View traces at: https://cloud.langfuse.com/")
36-
37-
# Create agent
25+
# Create and run agent — all traces automatically captured
3826
agent = Agent(
27+
name="Coder",
3928
instructions="You are a helpful coding assistant.",
40-
model="gpt-4o-mini",
29+
llm="openai/gpt-4o-mini",
4130
)
4231

43-
# Run with tracing
44-
with obs.trace("coding-session", user_id="developer-1"):
45-
response = agent.chat("Write a Python function to calculate fibonacci numbers")
46-
print(response)
32+
result = agent.start("Write a Python function to calculate fibonacci numbers")
33+
print(result)
34+
35+
# Flush traces
36+
sink.flush()
37+
sink.close()
4738

4839
print("\nCheck Langfuse dashboard for traces!")

src/praisonai/praisonai/cli/app.py

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,32 @@
1313
from .state.identifiers import create_context
1414

1515

16+
def _setup_langfuse_observability() -> None:
17+
"""Set up Langfuse observability by wiring TraceSink to both action and context emitters."""
18+
try:
19+
from praisonai.observability.langfuse import LangfuseSink
20+
from praisonaiagents.trace.protocol import TraceEmitter, set_default_emitter
21+
from praisonaiagents.trace.context_events import ContextTraceEmitter, set_context_emitter
22+
23+
# Create LangfuseSink (auto-reads env vars)
24+
sink = LangfuseSink()
25+
26+
# Set up action-level trace emitter
27+
emitter = TraceEmitter(sink=sink, enabled=True)
28+
set_default_emitter(emitter)
29+
30+
# Set up context-level trace emitter
31+
ctx_emitter = ContextTraceEmitter(sink=sink, enabled=True)
32+
set_context_emitter(ctx_emitter)
33+
34+
except ImportError:
35+
# Gracefully degrade if Langfuse not installed
36+
pass
37+
except Exception:
38+
# Silently fail to avoid breaking CLI if observability setup fails
39+
pass
40+
41+
1642
class OutputFormat(str, Enum):
1743
"""Output format options."""
1844
text = "text"
@@ -38,6 +64,7 @@ class GlobalState:
3864
quiet: bool = False
3965
verbose: bool = False
4066
screen_reader: bool = False
67+
observe: Optional[str] = None
4168
output_controller: Optional[OutputController] = None
4269

4370

@@ -98,6 +125,13 @@ def main_callback(
98125
"--screen-reader",
99126
help="Screen reader friendly output (no spinners/panels)",
100127
),
128+
observe: Optional[str] = typer.Option(
129+
None,
130+
"--observe",
131+
"-O",
132+
help="Enable observability (langfuse, langsmith, etc.)",
133+
envvar="PRAISONAI_OBSERVE",
134+
),
101135
):
102136
"""
103137
PraisonAI - AI Agents Framework CLI.
@@ -110,11 +144,16 @@ def main_callback(
110144
state.quiet = quiet
111145
state.verbose = verbose
112146
state.screen_reader = screen_reader
147+
state.observe = observe
113148

114149
# Handle --json alias
115150
if json_output:
116151
state.output_format = OutputFormat.json
117152

153+
# Set up observability if requested
154+
if observe == "langfuse":
155+
_setup_langfuse_observability()
156+
118157
# Determine output mode
119158
if state.quiet:
120159
mode = OutputMode.QUIET

src/praisonai/praisonai/flow/components/PraisonAI/praisonai_agent.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -405,6 +405,9 @@ def build_agent(self) -> Any:
405405
def build_response(self) -> Message:
406406
"""Execute the agent and return the response as a Message."""
407407
agent = self.build_agent()
408+
409+
# Wire up observability if configured
410+
self._setup_observability()
408411

409412
# Get input value
410413
input_value = self.input_value
@@ -434,3 +437,19 @@ def _get_llm(self) -> str:
434437
if converted:
435438
return converted
436439
return self.llm
440+
441+
def _setup_observability(self) -> None:
442+
"""Auto-configure observability from environment variables."""
443+
import os
444+
observe = os.environ.get("PRAISONAI_OBSERVE", "")
445+
if observe == "langfuse":
446+
try:
447+
from praisonai.observability.langfuse import LangfuseSink
448+
from praisonaiagents.trace.context_events import (
449+
ContextTraceEmitter, set_context_emitter
450+
)
451+
sink = LangfuseSink()
452+
emitter = ContextTraceEmitter(sink=sink, enabled=True)
453+
set_context_emitter(emitter)
454+
except ImportError:
455+
pass # Langfuse not installed, gracefully degrade

src/praisonai/praisonai/flow/components/PraisonAI/praisonai_agents.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -307,6 +307,9 @@ def build_agents(self) -> Any:
307307
def build_response(self) -> Message:
308308
"""Execute the multi-agent workflow and return the response."""
309309
agents_instance = self.build_agents()
310+
311+
# Wire up observability if configured
312+
self._setup_observability()
310313

311314
# Get input value
312315
input_value = self.input_value
@@ -326,3 +329,19 @@ def build_response(self) -> Message:
326329
async def build_response_async(self) -> Message:
327330
"""Execute the multi-agent workflow asynchronously."""
328331
return await asyncio.to_thread(self.build_response)
332+
333+
def _setup_observability(self) -> None:
334+
"""Auto-configure observability from environment variables."""
335+
import os
336+
observe = os.environ.get("PRAISONAI_OBSERVE", "")
337+
if observe == "langfuse":
338+
try:
339+
from praisonai.observability.langfuse import LangfuseSink
340+
from praisonaiagents.trace.context_events import (
341+
ContextTraceEmitter, set_context_emitter
342+
)
343+
sink = LangfuseSink()
344+
emitter = ContextTraceEmitter(sink=sink, enabled=True)
345+
set_context_emitter(emitter)
346+
except ImportError:
347+
pass # Langfuse not installed, gracefully degrade

0 commit comments

Comments
 (0)