@@ -613,12 +613,17 @@ def _set_completions_api_output_data(
613613def _wrap_synchronous_completions_chunk_iterator (
614614 span : "Span" ,
615615 integration : "OpenAIIntegration" ,
616- start_time : "float" ,
616+ start_time : "Optional[ float] " ,
617617 messages : "Iterable[ChatCompletionMessageParam]" ,
618618 response : "Stream[ChatCompletionChunk]" ,
619619 old_iterator : "Iterator[ChatCompletionChunk]" ,
620620 finish_span : "bool" ,
621621):
622+ """
623+ Sets information received while iterating the response stream on the AI Client Span.
624+    Computes token counts based on inputs and outputs using tiktoken if token counts are not in the model response.
625+ Responsible for closing the AI Client Span if instructed to by the `finish_span` argument.
626+ """
622627 ttft = None
623628 data_buf : "list[list[str]]" = [] # one for each choice
624629
@@ -664,13 +669,17 @@ def _wrap_synchronous_completions_chunk_iterator(
664669async def _wrap_asynchronous_completions_chunk_iterator (
665670 span : "Span" ,
666671 integration : "OpenAIIntegration" ,
667- start_time : "float" ,
672+ start_time : "Optional[ float] " ,
668673 messages : "Iterable[ChatCompletionMessageParam]" ,
669674 response : "AsyncStream[ChatCompletionChunk]" ,
670675 old_iterator : "AsyncIterator[ChatCompletionChunk]" ,
671676 finish_span : "bool" ,
672677):
673- start_time = time .perf_counter ()
678+ """
679+ Sets information received while iterating the response stream on the AI Client Span.
680+    Computes token counts based on inputs and outputs using tiktoken if token counts are not in the model response.
681+ Responsible for closing the AI Client Span if instructed to by the `finish_span` argument.
682+ """
674683 ttft = None
675684 data_buf : "list[list[str]]" = [] # one for each choice
676685
@@ -716,13 +725,17 @@ async def _wrap_asynchronous_completions_chunk_iterator(
716725def _wrap_synchronous_responses_event_iterator (
717726 span : "Span" ,
718727 integration : "OpenAIIntegration" ,
719- start_time : "float" ,
728+ start_time : "Optional[ float] " ,
720729 input : "Optional[Union[str, ResponseInputParam]]" ,
721730 response : "Stream[ResponseStreamEvent]" ,
722731 old_iterator : "Iterator[ResponseStreamEvent]" ,
723732 finish_span : "bool" ,
724733):
725- start_time = time .perf_counter ()
734+ """
735+ Sets information received while iterating the response stream on the AI Client Span.
736+    Computes token counts based on inputs and outputs using tiktoken if token counts are not in the model response.
737+ Responsible for closing the AI Client Span if instructed to by the `finish_span` argument.
738+ """
726739 ttft = None
727740 data_buf : "list[list[str]]" = [] # one for each choice
728741
@@ -775,12 +788,17 @@ def _wrap_synchronous_responses_event_iterator(
775788async def _wrap_asynchronous_responses_event_iterator (
776789 span : "Span" ,
777790 integration : "OpenAIIntegration" ,
778- start_time : "float" ,
791+ start_time : "Optional[ float] " ,
779792 input : "Optional[Union[str, ResponseInputParam]]" ,
780793 response : "AsyncStream[ResponseStreamEvent]" ,
781794 old_iterator : "AsyncIterator[ResponseStreamEvent]" ,
782795 finish_span : "bool" ,
783796):
797+ """
798+ Sets information received while iterating the response stream on the AI Client Span.
799+    Computes token counts based on inputs and outputs using tiktoken if token counts are not in the model response.
800+ Responsible for closing the AI Client Span if instructed to by the `finish_span` argument.
801+ """
784802 ttft : "Optional[float]" = None
785803 data_buf : "list[list[str]]" = [] # one for each choice
786804
0 commit comments