From f535a2aa71d2c898981b5a5d34e725e34715d7e4 Mon Sep 17 00:00:00 2001
From: jiangbo721 <365065261@qq.com>
Date: Wed, 19 Feb 2025 09:14:10 +0800
Subject: [PATCH] chore: prompt_message is actually assistant_message which is
 a bit am… (#13839)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: 刘江波
---
 .../model_providers/__base/large_language_model.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/api/core/model_runtime/model_providers/__base/large_language_model.py b/api/core/model_runtime/model_providers/__base/large_language_model.py
index 7f8de7cbab..ed67fef768 100644
--- a/api/core/model_runtime/model_providers/__base/large_language_model.py
+++ b/api/core/model_runtime/model_providers/__base/large_language_model.py
@@ -228,7 +228,7 @@ class LargeLanguageModel(AIModel):
         :return: result generator
         """
         callbacks = callbacks or []
-        prompt_message = AssistantPromptMessage(content="")
+        assistant_message = AssistantPromptMessage(content="")
         usage = None
         system_fingerprint = None
         real_model = model
@@ -250,7 +250,7 @@ class LargeLanguageModel(AIModel):
                     callbacks=callbacks,
                 )
 
-                prompt_message.content += chunk.delta.message.content
+                assistant_message.content += chunk.delta.message.content
                 real_model = chunk.model
                 if chunk.delta.usage:
                     usage = chunk.delta.usage
@@ -265,7 +265,7 @@ class LargeLanguageModel(AIModel):
             result=LLMResult(
                 model=real_model,
                 prompt_messages=prompt_messages,
-                message=prompt_message,
+                message=assistant_message,
                 usage=usage or LLMUsage.empty_usage(),
                 system_fingerprint=system_fingerprint,
             ),
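
Note for reviewers: the renamed variable is the buffer that accumulates the streamed assistant reply inside _invoke_result_generator, which is why assistant_message reads better than prompt_message. Below is a minimal, self-contained sketch of that aggregation pattern under simplified assumptions; AssistantMessage, Delta, Chunk, and aggregate_stream are illustrative stand-ins, not Dify's AssistantPromptMessage, LLMResultChunk, or LLMResult classes.

# Minimal sketch of the streaming-aggregation pattern that the rename touches.
# The types below are simplified stand-ins, not Dify's real model_runtime classes.
from dataclasses import dataclass
from typing import Iterator, Optional


@dataclass
class AssistantMessage:
    content: str = ""


@dataclass
class Delta:
    message: AssistantMessage
    usage: Optional[dict] = None


@dataclass
class Chunk:
    model: str
    delta: Delta
    system_fingerprint: Optional[str] = None


def aggregate_stream(model: str, chunks: Iterator[Chunk]) -> dict:
    # The buffer holds the assistant's reply as it streams in, which is why
    # calling it assistant_message is clearer than prompt_message.
    assistant_message = AssistantMessage(content="")
    usage: Optional[dict] = None
    system_fingerprint: Optional[str] = None
    real_model = model

    for chunk in chunks:
        # Append each streamed delta to the accumulated assistant content.
        assistant_message.content += chunk.delta.message.content
        real_model = chunk.model
        if chunk.delta.usage:
            usage = chunk.delta.usage
        if chunk.system_fingerprint:
            system_fingerprint = chunk.system_fingerprint

    # Package the aggregated stream into a final result, falling back to an
    # empty usage record when no chunk reported usage.
    return {
        "model": real_model,
        "message": assistant_message,
        "usage": usage or {},
        "system_fingerprint": system_fingerprint,
    }


if __name__ == "__main__":
    stream = iter(
        [
            Chunk(model="gpt-x", delta=Delta(message=AssistantMessage("Hello, "))),
            Chunk(
                model="gpt-x",
                delta=Delta(message=AssistantMessage("world!"), usage={"total_tokens": 5}),
            ),
        ]
    )
    print(aggregate_stream("gpt-x", stream)["message"].content)  # -> Hello, world!

The sketch mirrors the patched loop: the assistant content grows chunk by chunk while usage, system_fingerprint, and the real model name are captured from whichever chunks carry them.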