Skip to content

Commit 14f73bd

Browse files
committed
overload all the things
1 parent 5aba0ba commit 14f73bd

File tree

1 file changed

+205
-93
lines changed

1 file changed

+205
-93
lines changed

langfuse/_client/client.py

Lines changed: 205 additions & 93 deletions
Original file line number · Diff line number · Diff line change
@@ -397,46 +397,106 @@ def start_as_current_span(
397397
child_span.update(output="sub-result")
398398
```
399399
"""
400-
if trace_context:
401-
trace_id = trace_context.get("trace_id", None)
402-
parent_span_id = trace_context.get("parent_span_id", None)
400+
return self.start_as_current_observation(
401+
trace_context=trace_context,
402+
name=name,
403+
as_type="span",
404+
input=input,
405+
output=output,
406+
metadata=metadata,
407+
version=version,
408+
level=level,
409+
status_message=status_message,
410+
end_on_exit=end_on_exit,
411+
)
403412

404-
if trace_id:
405-
remote_parent_span = self._create_remote_parent_span(
406-
trace_id=trace_id, parent_span_id=parent_span_id
407-
)
413+
def start_observation(
414+
self,
415+
*,
416+
trace_context: Optional[TraceContext] = None,
417+
name: str,
418+
as_type: ObservationTypeLiteralNoEvent = "span",
419+
input: Optional[Any] = None,
420+
output: Optional[Any] = None,
421+
metadata: Optional[Any] = None,
422+
version: Optional[str] = None,
423+
level: Optional[SpanLevel] = None,
424+
status_message: Optional[str] = None,
425+
completion_start_time: Optional[datetime] = None,
426+
model: Optional[str] = None,
427+
model_parameters: Optional[Dict[str, MapValue]] = None,
428+
usage_details: Optional[Dict[str, int]] = None,
429+
cost_details: Optional[Dict[str, float]] = None,
430+
prompt: Optional[PromptClient] = None,
431+
) -> Union[
432+
LangfuseSpan,
433+
LangfuseGeneration,
434+
LangfuseAgent,
435+
LangfuseTool,
436+
LangfuseChain,
437+
LangfuseRetriever,
438+
LangfuseEvaluator,
439+
LangfuseEmbedding,
440+
LangfuseGuardrail,
441+
]:
442+
"""Create a new observation of the specified type.
408443
409-
return cast(
410-
_AgnosticContextManager[LangfuseSpan],
411-
self._create_span_with_parent_context(
412-
as_type="span",
413-
name=name,
414-
remote_parent_span=remote_parent_span,
415-
parent=None,
416-
end_on_exit=end_on_exit,
417-
input=input,
418-
output=output,
419-
metadata=metadata,
420-
version=version,
421-
level=level,
422-
status_message=status_message,
423-
),
424-
)
444+
This method creates a new observation but does not set it as the current span in the
445+
context. To create and use an observation within a context, use start_as_current_observation().
425446
426-
return cast(
427-
_AgnosticContextManager[LangfuseSpan],
428-
self._start_as_current_otel_span_with_processed_media(
429-
as_type="span",
447+
Args:
448+
trace_context: Optional context for connecting to an existing trace
449+
name: Name of the observation
450+
as_type: Type of observation to create (defaults to "span")
451+
input: Input data for the operation
452+
output: Output data from the operation
453+
metadata: Additional metadata to associate with the observation
454+
version: Version identifier for the code or component
455+
level: Importance level of the observation
456+
status_message: Optional status message for the observation
457+
completion_start_time: When the model started generating (for generation types)
458+
model: Name/identifier of the AI model used (for generation types)
459+
model_parameters: Parameters used for the model (for generation types)
460+
usage_details: Token usage information (for generation types)
461+
cost_details: Cost information (for generation types)
462+
prompt: Associated prompt template (for generation types)
463+
464+
Returns:
465+
An observation object of the appropriate type that must be ended with .end()
466+
"""
467+
if as_type == "generation":
468+
return self.start_generation(
469+
trace_context=trace_context,
430470
name=name,
431-
end_on_exit=end_on_exit,
432471
input=input,
433472
output=output,
434473
metadata=metadata,
435474
version=version,
436475
level=level,
437476
status_message=status_message,
438-
),
439-
)
477+
completion_start_time=completion_start_time,
478+
model=model,
479+
model_parameters=model_parameters,
480+
usage_details=usage_details,
481+
cost_details=cost_details,
482+
prompt=prompt,
483+
)
484+
else:
485+
# For all other types, create a span and set the appropriate type
486+
span = self.start_span(
487+
trace_context=trace_context,
488+
name=name,
489+
input=input,
490+
output=output,
491+
metadata=metadata,
492+
version=version,
493+
level=level,
494+
status_message=status_message,
495+
)
496+
# Set the observation type on the span
497+
span._observation_type = as_type
498+
span._otel_span.set_attribute("langfuse.observation.type", as_type)
499+
return span
440500

441501
def start_generation(
442502
self,
@@ -624,57 +684,23 @@ def start_as_current_generation(
624684
)
625685
```
626686
"""
627-
if trace_context:
628-
trace_id = trace_context.get("trace_id", None)
629-
parent_span_id = trace_context.get("parent_span_id", None)
630-
631-
if trace_id:
632-
remote_parent_span = self._create_remote_parent_span(
633-
trace_id=trace_id, parent_span_id=parent_span_id
634-
)
635-
636-
return cast(
637-
_AgnosticContextManager[LangfuseGeneration],
638-
self._create_span_with_parent_context(
639-
as_type="generation",
640-
name=name,
641-
remote_parent_span=remote_parent_span,
642-
parent=None,
643-
end_on_exit=end_on_exit,
644-
input=input,
645-
output=output,
646-
metadata=metadata,
647-
version=version,
648-
level=level,
649-
status_message=status_message,
650-
completion_start_time=completion_start_time,
651-
model=model,
652-
model_parameters=model_parameters,
653-
usage_details=usage_details,
654-
cost_details=cost_details,
655-
prompt=prompt,
656-
),
657-
)
658-
659-
return cast(
660-
_AgnosticContextManager[LangfuseGeneration],
661-
self._start_as_current_otel_span_with_processed_media(
662-
as_type="generation",
663-
name=name,
664-
end_on_exit=end_on_exit,
665-
input=input,
666-
output=output,
667-
metadata=metadata,
668-
version=version,
669-
level=level,
670-
status_message=status_message,
671-
completion_start_time=completion_start_time,
672-
model=model,
673-
model_parameters=model_parameters,
674-
usage_details=usage_details,
675-
cost_details=cost_details,
676-
prompt=prompt,
677-
),
687+
return self.start_as_current_observation(
688+
trace_context=trace_context,
689+
name=name,
690+
as_type="generation",
691+
input=input,
692+
output=output,
693+
metadata=metadata,
694+
version=version,
695+
level=level,
696+
status_message=status_message,
697+
completion_start_time=completion_start_time,
698+
model=model,
699+
model_parameters=model_parameters,
700+
usage_details=usage_details,
701+
cost_details=cost_details,
702+
prompt=prompt,
703+
end_on_exit=end_on_exit,
678704
)
679705

680706
@overload
@@ -721,25 +747,111 @@ def start_as_current_observation(
721747
*,
722748
trace_context: Optional[TraceContext] = None,
723749
name: str,
724-
as_type: Literal[
725-
"agent", "tool", "chain", "retriever", "evaluator", "embedding", "guardrail"
726-
],
750+
as_type: Literal["agent"],
727751
input: Optional[Any] = None,
728752
output: Optional[Any] = None,
729753
metadata: Optional[Any] = None,
730754
version: Optional[str] = None,
731755
level: Optional[SpanLevel] = None,
732756
status_message: Optional[str] = None,
733757
end_on_exit: Optional[bool] = None,
734-
) -> Union[
735-
_AgnosticContextManager[LangfuseAgent],
736-
_AgnosticContextManager[LangfuseTool],
737-
_AgnosticContextManager[LangfuseChain],
738-
_AgnosticContextManager[LangfuseRetriever],
739-
_AgnosticContextManager[LangfuseEvaluator],
740-
_AgnosticContextManager[LangfuseEmbedding],
741-
_AgnosticContextManager[LangfuseGuardrail],
742-
]: ...
758+
) -> _AgnosticContextManager[LangfuseAgent]: ...
759+
760+
@overload
761+
def start_as_current_observation(
762+
self,
763+
*,
764+
trace_context: Optional[TraceContext] = None,
765+
name: str,
766+
as_type: Literal["tool"],
767+
input: Optional[Any] = None,
768+
output: Optional[Any] = None,
769+
metadata: Optional[Any] = None,
770+
version: Optional[str] = None,
771+
level: Optional[SpanLevel] = None,
772+
status_message: Optional[str] = None,
773+
end_on_exit: Optional[bool] = None,
774+
) -> _AgnosticContextManager[LangfuseTool]: ...
775+
776+
@overload
777+
def start_as_current_observation(
778+
self,
779+
*,
780+
trace_context: Optional[TraceContext] = None,
781+
name: str,
782+
as_type: Literal["chain"],
783+
input: Optional[Any] = None,
784+
output: Optional[Any] = None,
785+
metadata: Optional[Any] = None,
786+
version: Optional[str] = None,
787+
level: Optional[SpanLevel] = None,
788+
status_message: Optional[str] = None,
789+
end_on_exit: Optional[bool] = None,
790+
) -> _AgnosticContextManager[LangfuseChain]: ...
791+
792+
@overload
793+
def start_as_current_observation(
794+
self,
795+
*,
796+
trace_context: Optional[TraceContext] = None,
797+
name: str,
798+
as_type: Literal["retriever"],
799+
input: Optional[Any] = None,
800+
output: Optional[Any] = None,
801+
metadata: Optional[Any] = None,
802+
version: Optional[str] = None,
803+
level: Optional[SpanLevel] = None,
804+
status_message: Optional[str] = None,
805+
end_on_exit: Optional[bool] = None,
806+
) -> _AgnosticContextManager[LangfuseRetriever]: ...
807+
808+
@overload
809+
def start_as_current_observation(
810+
self,
811+
*,
812+
trace_context: Optional[TraceContext] = None,
813+
name: str,
814+
as_type: Literal["evaluator"],
815+
input: Optional[Any] = None,
816+
output: Optional[Any] = None,
817+
metadata: Optional[Any] = None,
818+
version: Optional[str] = None,
819+
level: Optional[SpanLevel] = None,
820+
status_message: Optional[str] = None,
821+
end_on_exit: Optional[bool] = None,
822+
) -> _AgnosticContextManager[LangfuseEvaluator]: ...
823+
824+
@overload
825+
def start_as_current_observation(
826+
self,
827+
*,
828+
trace_context: Optional[TraceContext] = None,
829+
name: str,
830+
as_type: Literal["embedding"],
831+
input: Optional[Any] = None,
832+
output: Optional[Any] = None,
833+
metadata: Optional[Any] = None,
834+
version: Optional[str] = None,
835+
level: Optional[SpanLevel] = None,
836+
status_message: Optional[str] = None,
837+
end_on_exit: Optional[bool] = None,
838+
) -> _AgnosticContextManager[LangfuseEmbedding]: ...
839+
840+
@overload
841+
def start_as_current_observation(
842+
self,
843+
*,
844+
trace_context: Optional[TraceContext] = None,
845+
name: str,
846+
as_type: Literal["guardrail"],
847+
input: Optional[Any] = None,
848+
output: Optional[Any] = None,
849+
metadata: Optional[Any] = None,
850+
version: Optional[str] = None,
851+
level: Optional[SpanLevel] = None,
852+
status_message: Optional[str] = None,
853+
end_on_exit: Optional[bool] = None,
854+
) -> _AgnosticContextManager[LangfuseGuardrail]: ...
743855

744856
def start_as_current_observation(
745857
self,

0 commit comments

Comments (0)