fix: make Tongyi models' blocking mode work with incremental_output=stream (#13620)

This commit is contained in:
Yingchun Lai 2025-02-13 10:24:05 +08:00 committed by GitHub
parent 2b86465d4c
commit a3d3e30e3a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -197,7 +197,7 @@ class TongyiLargeLanguageModel(LargeLanguageModel):
else:
# nothing different between chat model and completion model in tongyi
params["messages"] = self._convert_prompt_messages_to_tongyi_messages(prompt_messages)
response = Generation.call(**params, result_format="message", stream=stream, incremental_output=True)
response = Generation.call(**params, result_format="message", stream=stream, incremental_output=stream)
if stream:
return self._handle_generate_stream_response(model, credentials, response, prompt_messages)