Skip to content

Commit 6f93d28

Browse files
committed
rename callback.langfuse_client to callback.client
1 parent 95c16e6 commit 6f93d28

5 files changed

Lines changed: 42 additions & 93 deletions

File tree

langfuse/langchain/CallbackHandler.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@
5757

5858
class LangchainCallbackHandler(LangchainBaseCallbackHandler):
5959
def __init__(self, *, public_key: Optional[str] = None) -> None:
60-
self.langfuse_client = get_client(public_key=public_key)
60+
self.client = get_client(public_key=public_key)
6161

6262
self.runs: Dict[UUID, Union[LangfuseSpan, LangfuseGeneration]] = {}
6363
self.prompt_to_parent_run_map = {}
@@ -174,7 +174,7 @@ def on_chain_start(
174174
}
175175

176176
if parent_run_id is None:
177-
self.runs[run_id] = self.langfuse_client.start_span(**content)
177+
self.runs[run_id] = self.client.start_span(**content)
178178
else:
179179
self.runs[run_id] = cast(
180180
LangfuseSpan, self.runs[parent_run_id]
@@ -439,7 +439,7 @@ def on_retriever_start(
439439
"level": "DEBUG" if tags and LANGSMITH_TAG_HIDDEN in tags else None,
440440
}
441441

442-
self.runs[run_id] = self.langfuse_client.start_span(**content)
442+
self.runs[run_id] = self.client.start_span(**content)
443443
else:
444444
self.runs[run_id] = cast(
445445
LangfuseSpan, self.runs[parent_run_id]
@@ -562,7 +562,7 @@ def __on_llm_action(
562562
LangfuseSpan, self.runs[parent_run_id]
563563
).start_generation(**content)
564564
else:
565-
self.runs[run_id] = self.langfuse_client.start_generation(**content)
565+
self.runs[run_id] = self.client.start_generation(**content)
566566

567567
except Exception as e:
568568
langfuse_logger.exception(e)

tests/test_extract_model.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@ def test_models(expected_model: str, model: Any):
135135
def test_entire_llm_call(expected_model, model):
136136
callback = CallbackHandler()
137137

138-
with callback.langfuse_client.start_as_current_span(name="parent") as span:
138+
with callback.client.start_as_current_span(name="parent") as span:
139139
trace_id = span.trace_id
140140

141141
try:
@@ -146,7 +146,7 @@ def test_entire_llm_call(expected_model, model):
146146
print(e)
147147
pass
148148

149-
callback.langfuse_client.flush()
149+
callback.client.flush()
150150
api = get_api()
151151

152152
trace = api.trace.get(trace_id)

tests/test_extract_model_langchain_openai.py

Lines changed: 0 additions & 51 deletions
This file was deleted.

tests/test_langchain.py

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -327,7 +327,7 @@ def test_callback_retriever_conversational_with_memory():
327327
)
328328
conversation.predict(input="Hi there!", callbacks=[handler])
329329

330-
handler.langfuse_client.flush()
330+
handler.client.flush()
331331

332332
trace = get_api().trace.get(trace_id)
333333

@@ -378,7 +378,7 @@ def test_callback_retriever_conversational():
378378

379379
chain({"question": query, "chat_history": []}, callbacks=[handler])
380380

381-
handler.langfuse_client.flush()
381+
handler.client.flush()
382382

383383
trace = api_wrapper.get_trace(trace_id)
384384

@@ -415,7 +415,7 @@ def test_callback_simple_openai():
415415
llm.invoke(text, config={"callbacks": [handler], "run_name": test_name})
416416

417417
# Ensure data is flushed to API
418-
handler.langfuse_client.flush()
418+
handler.client.flush()
419419
sleep(2)
420420

421421
# Retrieve trace
@@ -459,7 +459,7 @@ def test_callback_multiple_invocations_on_different_traces():
459459
handler2 = CallbackHandler()
460460
llm.invoke(text, config={"callbacks": [handler2], "run_name": test_name_2})
461461

462-
handler1.langfuse_client.flush()
462+
handler1.client.flush()
463463

464464
# Ensure data is flushed to API
465465
sleep(2)
@@ -543,7 +543,7 @@ def record_dog(name: str, color: str, fav_food: OptionalFavFood) -> str:
543543
callbacks=[handler],
544544
)
545545

546-
handler.langfuse_client.flush()
546+
handler.client.flush()
547547

548548
trace = get_api().trace.get(trace_id)
549549

@@ -635,7 +635,7 @@ def get_word_length(word: str) -> int:
635635
config={"callbacks": [callback]},
636636
)
637637

638-
callback.langfuse_client.flush()
638+
callback.client.flush()
639639

640640
trace = get_api().trace.get(trace_id)
641641

@@ -710,7 +710,7 @@ def _identifying_params(self) -> Mapping[str, Any]:
710710
)
711711
sequential_chain.run("This is a foobar thing", callbacks=[callback])
712712

713-
callback.langfuse_client.flush()
713+
callback.client.flush()
714714

715715
trace = get_api().trace.get(trace_id)
716716

@@ -763,7 +763,7 @@ def test_openai_instruct_usage():
763763
]
764764
runnable_chain.batch(input_list)
765765

766-
lf_handler.langfuse_client.flush()
766+
lf_handler.client.flush()
767767

768768
observations = get_api().trace.get(trace_id).observations
769769

@@ -933,7 +933,7 @@ def test_link_langfuse_prompts_invoke():
933933
},
934934
)
935935

936-
langfuse_handler.langfuse_client.flush()
936+
langfuse_handler.client.flush()
937937
sleep(2)
938938

939939
trace = get_api().trace.get(trace_id=trace_id)
@@ -1020,7 +1020,7 @@ def test_link_langfuse_prompts_stream():
10201020
for chunk in stream:
10211021
output += chunk
10221022

1023-
langfuse_handler.langfuse_client.flush()
1023+
langfuse_handler.client.flush()
10241024
sleep(2)
10251025

10261026
trace = get_api().trace.get(trace_id=trace_id)
@@ -1106,7 +1106,7 @@ def test_link_langfuse_prompts_batch():
11061106
},
11071107
)
11081108

1109-
langfuse_handler.langfuse_client.flush()
1109+
langfuse_handler.client.flush()
11101110

11111111
traces = get_api().trace.list(name=trace_name).data
11121112

@@ -1229,13 +1229,13 @@ class GetWeather(BaseModel):
12291229
}
12301230
]
12311231

1232-
with handler.langfuse_client.start_as_current_span(
1232+
with handler.client.start_as_current_span(
12331233
name="test_callback_openai_functions_with_tools"
12341234
) as span:
12351235
trace_id = span.trace_id
12361236
llm.bind_tools([address_tool, weather_tool]).invoke(messages)
12371237

1238-
handler.langfuse_client.flush()
1238+
handler.client.flush()
12391239

12401240
trace = get_api().trace.get(trace_id=trace_id)
12411241

@@ -1328,11 +1328,11 @@ def test_multimodal():
13281328
],
13291329
)
13301330

1331-
with handler.langfuse_client.start_as_current_span(name="test_multimodal") as span:
1331+
with handler.client.start_as_current_span(name="test_multimodal") as span:
13321332
trace_id = span.trace_id
13331333
model.invoke([message], config={"callbacks": [handler]})
13341334

1335-
handler.langfuse_client.flush()
1335+
handler.client.flush()
13361336

13371337
trace = get_api().trace.get(trace_id=trace_id)
13381338

@@ -1417,14 +1417,14 @@ def call_model(state: MessagesState):
14171417
handler = CallbackHandler()
14181418

14191419
# Use the Runnable
1420-
with handler.langfuse_client.start_as_current_span(name="test_langgraph") as span:
1420+
with handler.client.start_as_current_span(name="test_langgraph") as span:
14211421
trace_id = span.trace_id
14221422
final_state = app.invoke(
14231423
{"messages": [HumanMessage(content="what is the weather in sf")]},
14241424
config={"configurable": {"thread_id": 42}, "callbacks": [handler]},
14251425
)
14261426
print(final_state["messages"][-1].content)
1427-
handler.langfuse_client.flush()
1427+
handler.client.flush()
14281428

14291429
trace = get_api().trace.get(trace_id=trace_id)
14301430

@@ -1467,7 +1467,7 @@ def test_cached_token_usage():
14671467
# invoke again to force cached token usage
14681468
chain.invoke({"test_param": "in a funny way"}, config)
14691469

1470-
handler.langfuse_client.flush()
1470+
handler.client.flush()
14711471

14721472
trace = get_api().trace.get(handler.get_trace_id())
14731473

0 commit comments

Comments
 (0)