fix incorrect indent in TokenBufferMemory (#25215)

Will 2025-09-05 14:01:07 +08:00 committed by GitHub
parent e3cbe85db4
commit 110b6a0863
1 changed file with 4 additions and 4 deletions


@@ -167,11 +167,11 @@ class TokenBufferMemory:
             else:
                 prompt_messages.append(AssistantPromptMessage(content=message.answer))
 
-            if not prompt_messages:
-                return []
+        if not prompt_messages:
+            return []
 
-            # prune the chat message if it exceeds the max token limit
-            curr_message_tokens = self.model_instance.get_llm_num_tokens(prompt_messages)
+        # prune the chat message if it exceeds the max token limit
+        curr_message_tokens = self.model_instance.get_llm_num_tokens(prompt_messages)
 
         if curr_message_tokens > max_token_limit:
             while curr_message_tokens > max_token_limit and len(prompt_messages) > 1:
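
Why the indentation matters: with the old indentation, the empty-history check and the token count sat inside the message loop, so they ran on every iteration; after the fix they run once, on the completed list. Below is a minimal, self-contained sketch of the corrected control flow. Only the names visible in the diff (prompt_messages, get_llm_num_tokens, max_token_limit) come from the change itself; the class name TokenBufferMemorySketch, the messages list, and the stub token counter are assumptions made for illustration, not the project's actual implementation.

class TokenBufferMemorySketch:
    def __init__(self, messages, num_tokens_fn):
        self.messages = messages          # assumed source of chat history
        self.num_tokens = num_tokens_fn   # stands in for model_instance.get_llm_num_tokens

    def get_history_prompt_messages(self, max_token_limit: int = 2000):
        prompt_messages = []
        for message in self.messages:
            prompt_messages.append(message)

        # With the old indentation the next lines sat inside the loop above,
        # so the empty check and the token count ran on every iteration.
        # After the fix they run exactly once, on the completed list.
        if not prompt_messages:
            return []

        # prune the chat history if it exceeds the max token limit
        curr_message_tokens = self.num_tokens(prompt_messages)
        if curr_message_tokens > max_token_limit:
            while curr_message_tokens > max_token_limit and len(prompt_messages) > 1:
                prompt_messages.pop(0)
                curr_message_tokens = self.num_tokens(prompt_messages)
        return prompt_messages


# Rough usage: the token count is approximated by a word count.
memory = TokenBufferMemorySketch(
    ["hello there", "a much longer answer " * 50, "short reply"],
    lambda msgs: sum(len(m.split()) for m in msgs),
)
print(memory.get_history_prompt_messages(max_token_limit=20))  # -> ['short reply']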