From c0f6cfb1be9ff7764566aae760fc062e0f2eb828 Mon Sep 17 00:00:00 2001 From: Bowen Liang Date: Thu, 7 Nov 2024 23:10:26 +0800 Subject: [PATCH] apply TRY401 --- api/commands.py | 4 ++-- api/controllers/console/app/audio.py | 6 +++--- .../console/datasets/datasets_document.py | 2 +- api/controllers/console/workspace/models.py | 10 ++++++++-- api/controllers/web/audio.py | 4 ++-- api/core/app/apps/advanced_chat/app_generator.py | 2 +- .../apps/advanced_chat/generate_task_pipeline.py | 2 +- api/core/app/apps/message_based_app_generator.py | 2 +- api/core/app/apps/workflow/app_generator.py | 4 +++- .../app/apps/workflow/generate_task_pipeline.py | 2 +- api/core/app/task_pipeline/message_cycle_manage.py | 2 +- api/core/helper/moderation.py | 2 +- api/core/helper/module_import_helper.py | 2 +- api/core/indexing_runner.py | 2 +- api/core/llm_generator/llm_generator.py | 10 ++++++---- .../azure_ai_studio/rerank/rerank.py | 2 +- .../model_providers/sagemaker/rerank/rerank.py | 2 +- .../sagemaker/speech2text/speech2text.py | 2 +- .../sagemaker/text_embedding/text_embedding.py | 2 +- api/core/moderation/output_moderation.py | 2 +- api/core/ops/ops_trace_manager.py | 4 ++-- .../datasource/vdb/couchbase/couchbase_vector.py | 2 +- .../rag/datasource/vdb/lindorm/lindorm_vector.py | 8 ++++---- .../rag/datasource/vdb/myscale/myscale_vector.py | 2 +- .../datasource/vdb/opensearch/opensearch_vector.py | 2 +- api/core/rag/embedding/cached_embedding.py | 8 ++++---- api/core/rag/extractor/word_extractor.py | 2 +- .../processor/qa_index_processor.py | 2 +- api/core/tools/tool/workflow_tool.py | 2 +- api/core/tools/tool_file_manager.py | 2 +- api/core/tools/tool_manager.py | 2 +- api/core/tools/utils/message_transformer.py | 2 +- api/core/workflow/graph_engine/graph_engine.py | 4 ++-- api/core/workflow/nodes/base/node.py | 2 +- api/extensions/ext_storage.py | 14 +++++++------- api/libs/smtp.py | 6 +++--- api/models/dataset.py | 2 +- api/services/account_service.py | 4 ++-- 
api/services/app_service.py | 2 +- api/services/tools/api_tools_manage_service.py | 2 +- api/services/tools/tools_transform_service.py | 2 +- .../annotation/delete_annotation_index_task.py | 2 +- .../annotation/disable_annotation_reply_task.py | 2 +- .../annotation/enable_annotation_reply_task.py | 2 +- api/tasks/batch_create_segment_to_index_task.py | 2 +- 45 files changed, 80 insertions(+), 70 deletions(-) diff --git a/api/commands.py b/api/commands.py index 10122ceb3d..23787f38bf 100644 --- a/api/commands.py +++ b/api/commands.py @@ -589,7 +589,7 @@ def upgrade_db(): click.echo(click.style("Database migration successful!", fg="green")) except Exception as e: - logging.exception(f"Database migration failed: {e}") + logging.exception("Failed to execute database migration") finally: lock.release() else: @@ -633,7 +633,7 @@ where sites.id is null limit 1000""" except Exception as e: failed_app_ids.append(app_id) click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red")) - logging.exception(f"Fix app related site missing issue failed, error: {e}") + logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}") continue if not processed_count: diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index 112446613f..695b8890e3 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -70,7 +70,7 @@ class ChatMessageAudioApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error, {str(e)}.") + logging.exception("Failed to handle post request to ChatMessageAudioApi") raise InternalServerError() @@ -128,7 +128,7 @@ class ChatMessageTextApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error, {str(e)}.") + logging.exception("Failed to handle post request to ChatMessageTextApi") raise InternalServerError() @@ -170,7 +170,7 @@ class 
TextModesApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error, {str(e)}.") + logging.exception("Failed to handle get request to TextModesApi") raise InternalServerError() diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 60848039c5..f38408525a 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -948,7 +948,7 @@ class DocumentRetryApi(DocumentResource): raise DocumentAlreadyFinishedError() retry_documents.append(document) except Exception as e: - logging.exception(f"Document {document_id} retry failed: {str(e)}") + logging.exception(f"Failed to retry document, document id: {document_id}") continue # retry document DocumentService.retry_document(dataset_id, retry_documents) diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index 57443cc3b3..f804285f00 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -72,7 +72,10 @@ class DefaultModelApi(Resource): model=model_setting["model"], ) except Exception as ex: - logging.exception(f"{model_setting['model_type']} save error: {ex}") + logging.exception( + f"Failed to update default model, model type: {model_setting['model_type']}," + f" model:{model_setting.get('model')}" + ) raise ex return {"result": "success"} @@ -156,7 +159,10 @@ class ModelProviderModelApi(Resource): credentials=args["credentials"], ) except CredentialsValidateFailedError as ex: - logging.exception(f"save model credentials error: {ex}") + logging.exception( + f"Failed to save model credentials, tenant_id: {tenant_id}," + f" model: {args.get('model')}, model_type: {args.get('model_type')}" + ) raise ValueError(str(ex)) return {"result": "success"}, 200 diff --git a/api/controllers/web/audio.py b/api/controllers/web/audio.py index 
23550efe2e..e8521307ad 100644 --- a/api/controllers/web/audio.py +++ b/api/controllers/web/audio.py @@ -59,7 +59,7 @@ class AudioApi(WebApiResource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error: {str(e)}") + logging.exception("Failed to handle post request to AudioApi") raise InternalServerError() @@ -117,7 +117,7 @@ class TextApi(WebApiResource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error: {str(e)}") + logging.exception("Failed to handle post request to TextApi") raise InternalServerError() diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 0b88345061..00e5a74732 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -362,5 +362,5 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): if e.args[0] == "I/O operation on closed file.": # ignore this error raise GenerateTaskStoppedError() else: - logger.exception(e) + logger.exception(f"Failed to process generate task pipeline, conversation_id: {conversation.id}") raise e diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 1d4c0ea0fa..e1798957b9 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -242,7 +242,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc start_listener_time = time.time() yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id) except Exception as e: - logger.exception(e) + logger.exception(f"Failed to listen audio message, task_id: {task_id}") break if tts_publisher: yield MessageAudioEndStreamResponse(audio="", task_id=task_id) diff --git a/api/core/app/apps/message_based_app_generator.py 
b/api/core/app/apps/message_based_app_generator.py index bae64368e3..da206f01e7 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -80,7 +80,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): if e.args[0] == "I/O operation on closed file.": # ignore this error raise GenerateTaskStoppedError() else: - logger.exception(e) + logger.exception(f"Failed to handle response, conversation_id: {conversation.id}") raise e def _get_conversation_by_user( diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index a0080ece20..65da39b220 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -298,5 +298,7 @@ class WorkflowAppGenerator(BaseAppGenerator): if e.args[0] == "I/O operation on closed file.": # ignore this error raise GenerateTaskStoppedError() else: - logger.exception(e) + logger.exception( + f"Fails to process generate task pipeline, task_id: {application_generate_entity.task_id}" + ) raise e diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index aaa4824fe8..9e4921d6a2 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -216,7 +216,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa else: yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id) except Exception as e: - logger.exception(e) + logger.exception(f"Fails to get audio trunk, task_id: {task_id}") break if tts_publisher: yield MessageAudioEndStreamResponse(audio="", task_id=task_id) diff --git a/api/core/app/task_pipeline/message_cycle_manage.py b/api/core/app/task_pipeline/message_cycle_manage.py index 236eebf0b8..e818a090ed 100644 --- a/api/core/app/task_pipeline/message_cycle_manage.py +++ 
b/api/core/app/task_pipeline/message_cycle_manage.py @@ -86,7 +86,7 @@ class MessageCycleManage: conversation.name = name except Exception as e: if dify_config.DEBUG: - logging.exception(f"generate conversation name failed: {e}") + logging.exception(f"generate conversation name failed, conversation_id: {conversation_id}") pass db.session.merge(conversation) diff --git a/api/core/helper/moderation.py b/api/core/helper/moderation.py index b880590de2..da0fd0031c 100644 --- a/api/core/helper/moderation.py +++ b/api/core/helper/moderation.py @@ -41,7 +41,7 @@ def check_moderation(model_config: ModelConfigWithCredentialsEntity, text: str) if moderation_result is True: return True except Exception as ex: - logger.exception(ex) + logger.exception(f"Fails to check moderation, provider_name: {provider_name}") raise InvokeBadRequestError("Rate limit exceeded, please try again later.") return False diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py index e6e1491548..1e2fefce88 100644 --- a/api/core/helper/module_import_helper.py +++ b/api/core/helper/module_import_helper.py @@ -29,7 +29,7 @@ def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_laz spec.loader.exec_module(module) return module except Exception as e: - logging.exception(f"Failed to load module {module_name} from {py_file_path}: {str(e)}") + logging.exception(f"Failed to load module {module_name} from script file '{py_file_path}'") raise e diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index e2a94073cf..b4aee2621a 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -554,7 +554,7 @@ class IndexingRunner: qa_documents.append(qa_document) format_documents.extend(qa_documents) except Exception as e: - logging.exception(e) + logging.exception("Failed to format qa document") all_qa_documents.extend(format_documents) diff --git a/api/core/llm_generator/llm_generator.py 
b/api/core/llm_generator/llm_generator.py index 9cf9ed75c0..3a92c8d9d2 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -102,7 +102,7 @@ class LLMGenerator: except InvokeError: questions = [] except Exception as e: - logging.exception(e) + logging.exception("Failed to generate suggested questions after answer") questions = [] return questions @@ -148,7 +148,7 @@ class LLMGenerator: error = str(e) error_step = "generate rule config" except Exception as e: - logging.exception(e) + logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}") rule_config["error"] = str(e) rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else "" @@ -234,7 +234,7 @@ class LLMGenerator: error_step = "generate conversation opener" except Exception as e: - logging.exception(e) + logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}") rule_config["error"] = str(e) rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else "" @@ -286,7 +286,9 @@ class LLMGenerator: error = str(e) return {"code": "", "language": code_language, "error": f"Failed to generate code. 
Error: {error}"} except Exception as e: - logging.exception(e) + logging.exception( + f"Failed to invoke LLM model, model: {model_config.get('name')}, language: {code_language}" + ) return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"} @classmethod diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py index 84672520e0..9b75285e40 100644 --- a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py @@ -103,7 +103,7 @@ class AzureRerankModel(RerankModel): return RerankResult(model=model, docs=rerank_documents) except Exception as e: - logger.exception(f"Exception in Azure rerank: {e}") + logger.exception(f"Failed to invoke rerank model, model: {model}") raise def validate_credentials(self, model: str, credentials: dict) -> None: diff --git a/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py b/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py index 49c3fa5921..df797bae26 100644 --- a/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py @@ -113,7 +113,7 @@ class SageMakerRerankModel(RerankModel): return RerankResult(model=model, docs=rerank_documents) except Exception as e: - logger.exception(f"Exception {e}, line : {line}") + logger.exception(f"Failed to invoke rerank model, model: {model}") def validate_credentials(self, model: str, credentials: dict) -> None: """ diff --git a/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py b/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py index 8fdf68abe1..2d50e9c7b4 100644 --- a/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py +++ 
b/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py @@ -78,7 +78,7 @@ class SageMakerSpeech2TextModel(Speech2TextModel): json_obj = json.loads(json_str) asr_text = json_obj["text"] except Exception as e: - logger.exception(f"failed to invoke speech2text model, {e}") + logger.exception(f"failed to invoke speech2text model, model: {model}") raise CredentialsValidateFailedError(str(e)) return asr_text diff --git a/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py index ececfda11a..ef4ddcd6a7 100644 --- a/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py @@ -117,7 +117,7 @@ class SageMakerEmbeddingModel(TextEmbeddingModel): return TextEmbeddingResult(embeddings=all_embeddings, usage=usage, model=model) except Exception as e: - logger.exception(f"Exception {e}, line : {line}") + logger.exception(f"Failed to invoke text embedding model, model: {model}, line: {line}") def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: """ diff --git a/api/core/moderation/output_moderation.py b/api/core/moderation/output_moderation.py index 83f4d2d57d..4635bd9c25 100644 --- a/api/core/moderation/output_moderation.py +++ b/api/core/moderation/output_moderation.py @@ -126,6 +126,6 @@ class OutputModeration(BaseModel): result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer) return result except Exception as e: - logger.exception("Moderation Output error: %s", e) + logger.exception(f"Moderation Output error, app_id: {app_id}") return None diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 79704c115f..1069889abd 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -711,7 +711,7 @@ class 
TraceQueueManager: trace_task.app_id = self.app_id trace_manager_queue.put(trace_task) except Exception as e: - logging.exception(f"Error adding trace task: {e}") + logging.exception(f"Error adding trace task, trace_type {trace_task.trace_type}") finally: self.start_timer() @@ -730,7 +730,7 @@ class TraceQueueManager: if tasks: self.send_to_celery(tasks) except Exception as e: - logging.exception(f"Error processing trace tasks: {e}") + logging.exception("Error processing trace tasks") def start_timer(self): global trace_manager_timer diff --git a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py index 98da5e3d5e..d26726e864 100644 --- a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py +++ b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py @@ -242,7 +242,7 @@ class CouchbaseVector(BaseVector): try: self._cluster.query(query, named_parameters={"doc_ids": ids}).execute() except Exception as e: - logger.exception(e) + logger.exception(f"Failed to delete documents, ids: {ids}") def delete_by_document_id(self, document_id: str): query = f""" diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py index 30d7f09ec2..60a1a89f1a 100644 --- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py +++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py @@ -79,7 +79,7 @@ class LindormVectorStore(BaseVector): existing_docs = self._client.mget(index=self._collection_name, body={"ids": batch_ids}, _source=False) return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]} except Exception as e: - logger.exception(f"Error fetching batch {batch_ids}: {e}") + logger.exception(f"Error fetching batch {batch_ids}") return set() @retry(stop=stop_after_attempt(3), wait=wait_fixed(60)) @@ -96,7 +96,7 @@ class LindormVectorStore(BaseVector): ) return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]} except Exception 
as e: - logger.exception(f"Error fetching batch {batch_ids}: {e}") + logger.exception(f"Error fetching batch ids: {batch_ids}") return set() if ids is None: @@ -177,7 +177,7 @@ class LindormVectorStore(BaseVector): else: logger.warning(f"Index '{self._collection_name}' does not exist. No deletion performed.") except Exception as e: - logger.exception(f"Error occurred while deleting the index: {e}") + logger.exception(f"Error occurred while deleting the index: {self._collection_name}") raise e def text_exists(self, id: str) -> bool: @@ -201,7 +201,7 @@ class LindormVectorStore(BaseVector): try: response = self._client.search(index=self._collection_name, body=query) except Exception as e: - logger.exception(f"Error executing search: {e}") + logger.exception(f"Error executing vector search, query: {query}") raise docs_and_scores = [] diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py index 2610b60a77..b7b6b803ad 100644 --- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py +++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py @@ -142,7 +142,7 @@ class MyScaleVector(BaseVector): for r in self._client.query(sql).named_results() ] except Exception as e: - logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") + logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") # noqa:TRY401 return [] def delete(self) -> None: diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py index 49eb00f140..7a976d7c3c 100644 --- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py +++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py @@ -158,7 +158,7 @@ class OpenSearchVector(BaseVector): try: response = self._client.search(index=self._collection_name.lower(), body=query) except Exception as e: - logger.exception(f"Error executing search: {e}") + 
logger.exception(f"Error executing vector search, query: {query}") raise docs = [] diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index 3ac65b88bb..1157c5c8e4 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -69,7 +69,7 @@ class CacheEmbedding(Embeddings): except IntegrityError: db.session.rollback() except Exception as e: - logging.exception("Failed transform embedding: %s", e) + logging.exception("Failed transform embedding") cache_embeddings = [] try: for i, embedding in zip(embedding_queue_indices, embedding_queue_embeddings): @@ -89,7 +89,7 @@ class CacheEmbedding(Embeddings): db.session.rollback() except Exception as ex: db.session.rollback() - logger.exception("Failed to embed documents: %s", ex) + logger.exception("Failed to embed documents") raise ex return text_embeddings @@ -112,7 +112,7 @@ class CacheEmbedding(Embeddings): embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist() except Exception as ex: if dify_config.DEBUG: - logging.exception(f"Failed to embed query text: {ex}") + logging.exception(f"Failed to embed query text '{text[:10]}...({len(text)} chars)'") raise ex try: @@ -126,7 +126,7 @@ class CacheEmbedding(Embeddings): redis_client.setex(embedding_cache_key, 600, encoded_str) except Exception as ex: if dify_config.DEBUG: - logging.exception("Failed to add embedding to redis %s", ex) + logging.exception(f"Failed to add embedding to redis for the text '{text[:10]}...({len(text)} chars)'") raise ex return embedding_results diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index 8e084ab4ff..313bdce48b 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -229,7 +229,7 @@ class WordExtractor(BaseExtractor): for i in url_pattern.findall(x.text): hyperlinks_url = str(i) except Exception as e: - 
logger.exception(e) + logger.exception("Failed to parse HYPERLINK xml") def parse_paragraph(paragraph): paragraph_content = [] diff --git a/api/core/rag/index_processor/processor/qa_index_processor.py b/api/core/rag/index_processor/processor/qa_index_processor.py index 1dbc473281..48e6cf7df7 100644 --- a/api/core/rag/index_processor/processor/qa_index_processor.py +++ b/api/core/rag/index_processor/processor/qa_index_processor.py @@ -159,7 +159,7 @@ class QAIndexProcessor(BaseIndexProcessor): qa_documents.append(qa_document) format_documents.extend(qa_documents) except Exception as e: - logging.exception(e) + logging.exception("Failed to format qa document") all_qa_documents.extend(format_documents) diff --git a/api/core/tools/tool/workflow_tool.py b/api/core/tools/tool/workflow_tool.py index 2ab72213ff..721fa06c54 100644 --- a/api/core/tools/tool/workflow_tool.py +++ b/api/core/tools/tool/workflow_tool.py @@ -175,7 +175,7 @@ class WorkflowTool(Tool): files.append(file_dict) except Exception as e: - logger.exception(e) + logger.exception(f"Failed to transform file {file}") else: parameters_result[parameter.name] = tool_parameters.get(parameter.name) diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py index ff56e20e87..5052f0897a 100644 --- a/api/core/tools/tool_file_manager.py +++ b/api/core/tools/tool_file_manager.py @@ -98,7 +98,7 @@ class ToolFileManager: response.raise_for_status() blob = response.content except Exception as e: - logger.exception(f"Failed to download file from {file_url}: {e}") + logger.exception(f"Failed to download file from {file_url}") raise mimetype = guess_type(file_url)[0] or "octet/stream" diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index d2723df7b2..ac333162b6 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -388,7 +388,7 @@ class ToolManager: yield provider except Exception as e: - logger.exception(f"load builtin provider {provider} 
error: {e}") + logger.exception(f"load builtin provider {provider}") continue # set builtin providers loaded cls._builtin_providers_loaded = True diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index 1812d24571..e30c903a4b 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -40,7 +40,7 @@ class ToolFileMessageTransformer: ) ) except Exception as e: - logger.exception(e) + logger.exception(f"Failed to download image from {url}") result.append( ToolInvokeMessage( type=ToolInvokeMessage.MessageType.TEXT, diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index f07ad4de11..60a5901b21 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -172,7 +172,7 @@ class GraphEngine: "answer" ].strip() except Exception as e: - logger.exception(f"Graph run failed: {str(e)}") + logger.exception("Graph run failed") yield GraphRunFailedEvent(error=str(e)) return @@ -692,7 +692,7 @@ class GraphEngine: ) return except Exception as e: - logger.exception(f"Node {node_instance.node_data.title} run failed: {str(e)}") + logger.exception(f"Node {node_instance.node_data.title} run failed") raise e finally: db.session.close() diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index 1433c8eaed..1871fff618 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -69,7 +69,7 @@ class BaseNode(Generic[GenericNodeData]): try: result = self._run() except Exception as e: - logger.exception(f"Node {self.node_id} failed to run: {e}") + logger.exception(f"Node {self.node_id} failed to run") result = NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, error=str(e), diff --git a/api/extensions/ext_storage.py b/api/extensions/ext_storage.py index 86fadf23d7..fa88da68b7 100644 --- 
a/api/extensions/ext_storage.py +++ b/api/extensions/ext_storage.py @@ -70,7 +70,7 @@ class Storage: try: self.storage_runner.save(filename, data) except Exception as e: - logging.exception("Failed to save file: %s", e) + logging.exception(f"Failed to save file {filename}") raise e def load(self, filename: str, /, *, stream: bool = False) -> Union[bytes, Generator]: @@ -80,42 +80,42 @@ class Storage: else: return self.load_once(filename) except Exception as e: - logging.exception("Failed to load file: %s", e) + logging.exception(f"Failed to load file {filename}") raise e def load_once(self, filename: str) -> bytes: try: return self.storage_runner.load_once(filename) except Exception as e: - logging.exception("Failed to load_once file: %s", e) + logging.exception(f"Failed to load_once file {filename}") raise e def load_stream(self, filename: str) -> Generator: try: return self.storage_runner.load_stream(filename) except Exception as e: - logging.exception("Failed to load_stream file: %s", e) + logging.exception(f"Failed to load_stream file {filename}") raise e def download(self, filename, target_filepath): try: self.storage_runner.download(filename, target_filepath) except Exception as e: - logging.exception("Failed to download file: %s", e) + logging.exception(f"Failed to download file {filename}") raise e def exists(self, filename): try: return self.storage_runner.exists(filename) except Exception as e: - logging.exception("Failed to check file exists: %s", e) + logging.exception(f"Failed to check file exists {filename}") raise e def delete(self, filename): try: return self.storage_runner.delete(filename) except Exception as e: - logging.exception("Failed to delete file: %s", e) + logging.exception(f"Failed to delete file {filename}") raise e diff --git a/api/libs/smtp.py b/api/libs/smtp.py index d57d99f3b7..2325d69a41 100644 --- a/api/libs/smtp.py +++ b/api/libs/smtp.py @@ -39,13 +39,13 @@ class SMTPClient: smtp.sendmail(self._from, mail["to"], msg.as_string())
except smtplib.SMTPException as e: - logging.exception(f"SMTP error occurred: {str(e)}") + logging.exception("SMTP error occurred") raise except TimeoutError as e: - logging.exception(f"Timeout occurred while sending email: {str(e)}") + logging.exception("Timeout occurred while sending email") raise except Exception as e: - logging.exception(f"Unexpected error occurred while sending email: {str(e)}") + logging.exception(f"Unexpected error occurred while sending email to {mail['to']}") raise finally: if smtp: diff --git a/api/models/dataset.py b/api/models/dataset.py index a1a626d7e4..a8b2c419d1 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -679,7 +679,7 @@ class DatasetKeywordTable(db.Model): return json.loads(keyword_table_text.decode("utf-8"), cls=SetDecoder) return None except Exception as e: - logging.exception(str(e)) + logging.exception(f"Failed to load keyword table from file: {file_key}") return None diff --git a/api/services/account_service.py b/api/services/account_service.py index 963a055948..aacc1c85f6 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -779,7 +779,7 @@ class RegisterService: db.session.query(Tenant).delete() db.session.commit() - logging.exception(f"Setup failed: {e}") + logging.exception(f"Setup account failed, email: {email}, name: {name}") raise ValueError(f"Setup failed: {e}") @classmethod @@ -821,7 +821,7 @@ class RegisterService: db.session.rollback() except Exception as e: db.session.rollback() - logging.exception(f"Register failed: {e}") + logging.exception("Register failed") raise AccountRegisterError(f"Registration failed: {e}") from e return account diff --git a/api/services/app_service.py b/api/services/app_service.py index ac45d623e8..620d0ac270 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -88,7 +88,7 @@ class AppService: except (ProviderTokenNotInitError, LLMBadRequestError): model_instance = None except Exception as e: - 
logging.exception(e) + logging.exception(f"Get default model instance failed, tenant_id: {tenant_id}") model_instance = None if model_instance: diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index b6b0143fac..78a80f70ab 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -195,7 +195,7 @@ class ApiToolManageService: # try to parse schema, avoid SSRF attack ApiToolManageService.parser_api_schema(schema) except Exception as e: - logger.exception(f"parse api schema error: {str(e)}") + logger.exception("parse api schema error") raise ValueError("invalid schema, please check the url you provided") return {"schema": schema} diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index e535ddb575..1befa11531 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -183,7 +183,7 @@ class ToolTransformService: try: username = db_provider.user.name except Exception as e: - logger.exception(f"failed to get user name for api provider {db_provider.id}: {str(e)}") + logger.exception(f"failed to get user name for api provider {db_provider.id}") # add provider into providers credentials = db_provider.credentials result = UserToolProvider( diff --git a/api/tasks/annotation/delete_annotation_index_task.py b/api/tasks/annotation/delete_annotation_index_task.py index 5758db53de..f0f6b32b06 100644 --- a/api/tasks/annotation/delete_annotation_index_task.py +++ b/api/tasks/annotation/delete_annotation_index_task.py @@ -38,4 +38,4 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green") ) except Exception as e: - logging.exception("Annotation deleted index failed:{}".format(str(e))) + logging.exception("Annotation deleted index 
failed") diff --git a/api/tasks/annotation/disable_annotation_reply_task.py b/api/tasks/annotation/disable_annotation_reply_task.py index 0f83dfdbd4..a2f4913513 100644 --- a/api/tasks/annotation/disable_annotation_reply_task.py +++ b/api/tasks/annotation/disable_annotation_reply_task.py @@ -60,7 +60,7 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str): click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green") ) except Exception as e: - logging.exception("Annotation batch deleted index failed:{}".format(str(e))) + logging.exception("Annotation batch deleted index failed") redis_client.setex(disable_app_annotation_job_key, 600, "error") disable_app_annotation_error_key = "disable_app_annotation_error_{}".format(str(job_id)) redis_client.setex(disable_app_annotation_error_key, 600, str(e)) diff --git a/api/tasks/annotation/enable_annotation_reply_task.py b/api/tasks/annotation/enable_annotation_reply_task.py index 82b70f6b71..e819bf3635 100644 --- a/api/tasks/annotation/enable_annotation_reply_task.py +++ b/api/tasks/annotation/enable_annotation_reply_task.py @@ -93,7 +93,7 @@ def enable_annotation_reply_task( click.style("App annotations added to index: {} latency: {}".format(app_id, end_at - start_at), fg="green") ) except Exception as e: - logging.exception("Annotation batch created index failed:{}".format(str(e))) + logging.exception("Annotation batch created index failed") redis_client.setex(enable_app_annotation_job_key, 600, "error") enable_app_annotation_error_key = "enable_app_annotation_error_{}".format(str(job_id)) redis_client.setex(enable_app_annotation_error_key, 600, str(e)) diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index d1b41f2675..5ee72c27fc 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -103,5 +103,5 @@ def 
batch_create_segment_to_index_task( click.style("Segment batch created job: {} latency: {}".format(job_id, end_at - start_at), fg="green") ) except Exception as e: - logging.exception("Segments batch created index failed:{}".format(str(e))) + logging.exception("Segments batch created index failed") redis_client.setex(indexing_cache_key, 600, "error")