fix: fix tongyi models blocking mode with incremental_output=stream (#13620)
parent 2b86465d4c
commit a3d3e30e3a
@@ -197,7 +197,7 @@ class TongyiLargeLanguageModel(LargeLanguageModel):
         else:
             # nothing different between chat model and completion model in tongyi
             params["messages"] = self._convert_prompt_messages_to_tongyi_messages(prompt_messages)
-            response = Generation.call(**params, result_format="message", stream=stream, incremental_output=True)
+            response = Generation.call(**params, result_format="message", stream=stream, incremental_output=stream)
 
         if stream:
             return self._handle_generate_stream_response(model, credentials, response, prompt_messages)
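Context for the one-line change above: in the DashScope SDK, incremental_output is a streaming-only option, so hardcoding incremental_output=True broke blocking (stream=False) calls; passing incremental_output=stream keeps both modes valid. Below is a minimal sketch of the two call paths after the fix, assuming a valid DASHSCOPE_API_KEY in the environment and using "qwen-turbo" as a stand-in model name:

# Minimal sketch of both call paths after the fix. Assumes DASHSCOPE_API_KEY
# is set; "qwen-turbo" is a stand-in model name, not taken from the commit.
from dashscope import Generation

messages = [{"role": "user", "content": "Hello"}]

# Blocking mode: incremental_output must follow stream (False here).
# With the old hardcoded incremental_output=True, this call failed.
response = Generation.call(
    model="qwen-turbo",
    messages=messages,
    result_format="message",
    stream=False,
    incremental_output=False,  # i.e. incremental_output=stream
)
print(response.output.choices[0].message.content)

# Streaming mode: incremental_output=True yields only the new text per chunk.
for chunk in Generation.call(
    model="qwen-turbo",
    messages=messages,
    result_format="message",
    stream=True,
    incremental_output=True,  # i.e. incremental_output=stream
):
    print(chunk.output.choices[0].message.content, end="", flush=True)

Tying the flag to stream means a blocking call returns one complete message while a streaming call emits per-chunk deltas, matching how the surrounding code branches on stream before dispatching to _handle_generate_stream_response.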