From b9b0866a467cc298ed0a2dd57ace660b841fe37e Mon Sep 17 00:00:00 2001
From: John Wang
Date: Thu, 29 Jun 2023 12:54:50 +0800
Subject: [PATCH] fix: generate summary error when tokens=4097 (#488)

---
 api/core/generator/llm_generator.py             | 5 ++++-
 api/tasks/generate_conversation_summary_task.py | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/api/core/generator/llm_generator.py b/api/core/generator/llm_generator.py
index d4b48bf00a..87540272da 100644
--- a/api/core/generator/llm_generator.py
+++ b/api/core/generator/llm_generator.py
@@ -45,7 +45,7 @@ class LLMGenerator:
         prompt = CONVERSATION_SUMMARY_PROMPT
         prompt_with_empty_context = prompt.format(context='')
         prompt_tokens = TokenCalculator.get_num_tokens(model, prompt_with_empty_context)
-        rest_tokens = llm_constant.max_context_token_length[model] - prompt_tokens - max_tokens
+        rest_tokens = llm_constant.max_context_token_length[model] - prompt_tokens - max_tokens - 1
 
         context = ''
         for message in messages:
@@ -56,6 +56,9 @@ class LLMGenerator:
             if rest_tokens - TokenCalculator.get_num_tokens(model, context + message_qa_text) > 0:
                 context += message_qa_text
 
+        if not context:
+            return '[message too long, no summary]'
+
         prompt = prompt.format(context=context)
 
         llm: StreamableOpenAI = LLMBuilder.to_llm(
diff --git a/api/tasks/generate_conversation_summary_task.py b/api/tasks/generate_conversation_summary_task.py
index b19576f6fc..fcdd1ef6d8 100644
--- a/api/tasks/generate_conversation_summary_task.py
+++ b/api/tasks/generate_conversation_summary_task.py
@@ -28,7 +28,7 @@ def generate_conversation_summary_task(conversation_id: str):
     try:
         # get conversation messages count
         history_message_count = conversation.message_count
-        if history_message_count >= 5:
+        if history_message_count >= 5 and not conversation.summary:
             app_model = conversation.app
             if not app_model:
                 return
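
---

Note (not part of the patch): the two hunks in llm_generator.py defend the
same boundary. OpenAI rejects a request whose prompt tokens plus max_tokens
exceed the model's context window, and token counts are not additive across
string concatenation, so a budget computed without slack can land exactly
one token over the window -- the tokens=4097 error against a 4096-token
model. The trailing "- 1" adds a one-token safety margin, and the
empty-context guard covers the case where even the first message is too
large to fit, returning a placeholder instead of sending a prompt with
nothing to summarize. The hunk in generate_conversation_summary_task.py is
independent: it skips conversations that already carry a summary, making
the task idempotent.

Below is a minimal, self-contained sketch of that budgeting logic, for
illustration only. count_tokens, MAX_CONTEXT_TOKENS, MAX_COMPLETION_TOKENS,
and build_summary_context are hypothetical stand-ins for
TokenCalculator.get_num_tokens, llm_constant.max_context_token_length[model],
max_tokens, and the patched method; none of them are Dify's actual API.

    MAX_CONTEXT_TOKENS = 4096     # e.g. a gpt-3.5-turbo-sized window (assumed)
    MAX_COMPLETION_TOKENS = 100   # tokens reserved for the summary (assumed)

    def count_tokens(text: str) -> int:
        # Stand-in tokenizer: one token per whitespace-separated word.
        return len(text.split())

    def build_summary_context(prompt_template: str, messages: list[str]) -> str | None:
        prompt_tokens = count_tokens(prompt_template.format(context=''))
        # The trailing "- 1" mirrors the patch: it keeps the request strictly
        # below the window even if tokenizing the fully formatted prompt
        # yields one token more than the sum of its parts.
        rest_tokens = MAX_CONTEXT_TOKENS - prompt_tokens - MAX_COMPLETION_TOKENS - 1

        context = ''
        for message in messages:
            # Admit a message only while the running total stays under budget.
            if rest_tokens - count_tokens(context + message) > 0:
                context += message

        # Mirror of the patch's empty-context guard: if even the first
        # message overflows the budget, tell the caller to skip the LLM call.
        if not context:
            return None
        return prompt_template.format(context=context)

    # A message far larger than the window leaves the context empty:
    template = 'Summarize the conversation:\n{context}\nSummary:'
    print(build_summary_context(template, ['word ' * 5000]))  # -> None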