fix: use Gemini response metadata for token counting (#11226)

This commit is contained in:
Shota Totsuka 2024-11-30 18:30:55 +09:00 committed by GitHub
parent e80f41a701
commit 594666eb61
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -254,8 +254,12 @@ class GoogleLargeLanguageModel(LargeLanguageModel):
         assistant_prompt_message = AssistantPromptMessage(content=response.text)
         # calculate num tokens
-        prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages)
-        completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message])
+        if response.usage_metadata:
+            prompt_tokens = response.usage_metadata.prompt_token_count
+            completion_tokens = response.usage_metadata.candidates_token_count
+        else:
+            prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages)
+            completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message])
         # transform usage
         usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)