@@ -1080,25 +1080,6 @@ async def acompletion(
                 },
             )
 
-            headers, response = await self.make_openai_chat_completion_request(
-                openai_aclient=openai_aclient, data=data, timeout=timeout
-            )
-            stringified_response = response.model_dump()
-            logging_obj.post_call(
-                input=data["messages"],
-                api_key=api_key,
-                original_response=stringified_response,
-                additional_args={"complete_input_dict": data},
-            )
-            logging_obj.model_call_details["response_headers"] = headers
-            return convert_to_model_response_object(
-                response_object=stringified_response,
-                model_response_object=model_response,
-                hidden_params={"headers": headers},
-                _response_headers=headers,
-            )
-        except Exception as e:
-            raise e
             headers, response = await self.make_openai_chat_completion_request(
                 openai_aclient=openai_aclient, data=data, timeout=timeout
             )
@@ -1114,6 +1095,7 @@ async def acompletion(
                 response_object=stringified_response,
                 model_response_object=model_response,
                 hidden_params={"headers": headers},
+                _response_headers=headers,
             )
         except openai.UnprocessableEntityError as e:
             ## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800
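
The two hunks above drop a duplicated request/response block in `acompletion` and make the single remaining call site forward the raw response headers via `_response_headers`. The following is a minimal sketch of the resulting success path, assembled from the diff context only (the surrounding try/except, client setup, and pre-call logging are omitted, so it is not runnable on its own):

    # Sketch of the de-duplicated acompletion success path after this change.
    headers, response = await self.make_openai_chat_completion_request(
        openai_aclient=openai_aclient, data=data, timeout=timeout
    )
    stringified_response = response.model_dump()
    logging_obj.post_call(
        input=data["messages"],
        api_key=api_key,
        original_response=stringified_response,
        additional_args={"complete_input_dict": data},
    )
    logging_obj.model_call_details["response_headers"] = headers
    return convert_to_model_response_object(
        response_object=stringified_response,
        model_response_object=model_response,
        hidden_params={"headers": headers},
        _response_headers=headers,  # newly forwarded so the caller can read the raw headers
    )
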
@@ -1233,30 +1215,8 @@ async def async_streaming(
                 },
             )
 
-            headers, response = await self.make_openai_chat_completion_request(
-                openai_aclient=openai_aclient, data=data, timeout=timeout
-            )
-            logging_obj.model_call_details["response_headers"] = headers
-            streamwrapper = CustomStreamWrapper(
-                completion_stream=response,
-                model=model,
-                custom_llm_provider="openai",
-                logging_obj=logging_obj,
-                stream_options=data.get("stream_options", None),
-                _response_headers=headers,
-            )
-            return streamwrapper
-        except (
-            Exception
-        ) as e:  # need to exception handle here. async exceptions don't get caught in sync functions.
-            if response is not None and hasattr(response, "text"):
-                raise OpenAIError(
-                    status_code=500,
-                    message=f"{str(e)}\n\nOriginal Response: {response.text}",
-
             headers, response = await self.make_openai_chat_completion_request(
                 openai_aclient=openai_aclient, data=data, timeout=timeout
-
             )
             logging_obj.model_call_details["response_headers"] = headers
             streamwrapper = CustomStreamWrapper(
@@ -1265,6 +1225,7 @@
                 custom_llm_provider="openai",
                 logging_obj=logging_obj,
                 stream_options=data.get("stream_options", None),
+                _response_headers=headers,
             )
             return streamwrapper
         except openai.UnprocessableEntityError as e:
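
Likewise for `async_streaming`: the stale duplicate block (including its half-finished exception handler) is removed, and the one remaining call site now passes the headers through to the stream wrapper. Again a sketch reconstructed from the diff context above, with the enclosing try/except omitted:

    # Sketch of the streaming path after this change.
    headers, response = await self.make_openai_chat_completion_request(
        openai_aclient=openai_aclient, data=data, timeout=timeout
    )
    logging_obj.model_call_details["response_headers"] = headers
    streamwrapper = CustomStreamWrapper(
        completion_stream=response,
        model=model,
        custom_llm_provider="openai",
        logging_obj=logging_obj,
        stream_options=data.get("stream_options", None),
        _response_headers=headers,  # newly forwarded alongside the wrapped stream
    )
    return streamwrapper
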