Mirror of https://github.com/langgenius/dify.git
Synced 2024-11-16 11:42:29 +08:00
Compare commits: 45369a1897 ... e92d3237b4 (2 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | e92d3237b4 |  |
|  | 2e0da361b0 |  |
@@ -589,7 +589,7 @@ def upgrade_db():
             click.echo(click.style("Database migration successful!", fg="green"))
         except Exception as e:
-            logging.exception(f"Database migration failed: {e}")
+            logging.exception("Failed to execute database migration")
         finally:
             lock.release()
     else:

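(Background for the recurring pattern in these hunks: `logging.exception` logs at ERROR level and automatically appends the active exception and its traceback to the record, so interpolating `e` or `str(e)` into the message only duplicates information. A minimal stdlib-only sketch, not repository code:)

import logging

try:
    1 / 0
except ZeroDivisionError:
    # The ZeroDivisionError and its traceback are appended to the log
    # record by exception() itself; the message only needs context.
    logging.exception("Division step failed")
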
@@ -633,7 +633,7 @@ where sites.id is null limit 1000"""
             except Exception as e:
                 failed_app_ids.append(app_id)
                 click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red"))
-                logging.exception(f"Fix app related site missing issue failed, error: {e}")
+                logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}")
                 continue
 
         if not processed_count:

@@ -70,7 +70,7 @@ class ChatMessageAudioApi(Resource):
         except ValueError as e:
             raise e
         except Exception as e:
-            logging.exception(f"internal server error, {str(e)}.")
+            logging.exception("Failed to handle post request to ChatMessageAudioApi")
             raise InternalServerError()

@@ -128,7 +128,7 @@ class ChatMessageTextApi(Resource):
         except ValueError as e:
             raise e
         except Exception as e:
-            logging.exception(f"internal server error, {str(e)}.")
+            logging.exception("Failed to handle post request to ChatMessageTextApi")
             raise InternalServerError()

@@ -170,7 +170,7 @@ class TextModesApi(Resource):
         except ValueError as e:
             raise e
         except Exception as e:
-            logging.exception(f"internal server error, {str(e)}.")
+            logging.exception("Failed to handle get request to TextModesApi")
             raise InternalServerError()

@@ -948,7 +948,7 @@ class DocumentRetryApi(DocumentResource):
                     raise DocumentAlreadyFinishedError()
                 retry_documents.append(document)
             except Exception as e:
-                logging.exception(f"Document {document_id} retry failed: {str(e)}")
+                logging.exception(f"Failed to retry document, document id: {document_id}")
                 continue
             # retry document
             DocumentService.retry_document(dataset_id, retry_documents)

@@ -72,7 +72,10 @@ class DefaultModelApi(Resource):
                     model=model_setting["model"],
                 )
             except Exception as ex:
-                logging.exception(f"{model_setting['model_type']} save error: {ex}")
+                logging.exception(
+                    f"Failed to update default model, model type: {model_setting['model_type']},"
+                    f" model:{model_setting.get('model')}"
+                )
                 raise ex
 
         return {"result": "success"}

@@ -156,7 +159,10 @@ class ModelProviderModelApi(Resource):
                 credentials=args["credentials"],
             )
         except CredentialsValidateFailedError as ex:
-            logging.exception(f"save model credentials error: {ex}")
+            logging.exception(
+                f"Failed to save model credentials, tenant_id: {tenant_id},"
+                f" model: {args.get('model')}, model_type: {args.get('model_type')}"
+            )
             raise ValueError(str(ex))
 
         return {"result": "success"}, 200

@@ -59,7 +59,7 @@ class AudioApi(WebApiResource):
         except ValueError as e:
             raise e
         except Exception as e:
-            logging.exception(f"internal server error: {str(e)}")
+            logging.exception("Failed to handle post request to AudioApi")
             raise InternalServerError()

@@ -117,7 +117,7 @@ class TextApi(WebApiResource):
         except ValueError as e:
             raise e
         except Exception as e:
-            logging.exception(f"internal server error: {str(e)}")
+            logging.exception("Failed to handle post request to TextApi")
             raise InternalServerError()

@@ -362,5 +362,5 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             if e.args[0] == "I/O operation on closed file.":  # ignore this error
                 raise GenerateTaskStoppedError()
             else:
-                logger.exception(e)
+                logger.exception(f"Failed to process generate task pipeline, conversation_id: {conversation.id}")
                 raise e

@@ -242,7 +242,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
                     start_listener_time = time.time()
                     yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
             except Exception as e:
-                logger.exception(e)
+                logger.exception(f"Failed to listen audio message, task_id: {task_id}")
                 break
         if tts_publisher:
             yield MessageAudioEndStreamResponse(audio="", task_id=task_id)

@@ -80,7 +80,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
             if e.args[0] == "I/O operation on closed file.":  # ignore this error
                 raise GenerateTaskStoppedError()
             else:
-                logger.exception(e)
+                logger.exception(f"Failed to handle response, conversation_id: {conversation.id}")
                 raise e
 
     def _get_conversation_by_user(

@@ -298,5 +298,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
             if e.args[0] == "I/O operation on closed file.":  # ignore this error
                 raise GenerateTaskStoppedError()
             else:
-                logger.exception(e)
+                logger.exception(
+                    f"Fails to process generate task pipeline, task_id: {application_generate_entity.task_id}"
+                )
                 raise e

@@ -216,7 +216,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 else:
                     yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
             except Exception as e:
-                logger.exception(e)
+                logger.exception(f"Fails to get audio trunk, task_id: {task_id}")
                 break
         if tts_publisher:
             yield MessageAudioEndStreamResponse(audio="", task_id=task_id)

@@ -86,7 +86,7 @@ class MessageCycleManage:
                 conversation.name = name
             except Exception as e:
                 if dify_config.DEBUG:
-                    logging.exception(f"generate conversation name failed: {e}")
+                    logging.exception(f"generate conversation name failed, conversation_id: {conversation_id}")
                 pass
 
             db.session.merge(conversation)

@@ -41,7 +41,7 @@ def check_moderation(model_config: ModelConfigWithCredentialsEntity, text: str)
         if moderation_result is True:
             return True
     except Exception as ex:
-        logger.exception(ex)
+        logger.exception(f"Fails to check moderation, provider_name: {provider_name}")
         raise InvokeBadRequestError("Rate limit exceeded, please try again later.")
 
     return False

@@ -29,7 +29,7 @@ def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_laz
         spec.loader.exec_module(module)
         return module
     except Exception as e:
-        logging.exception(f"Failed to load module {module_name} from {py_file_path}: {str(e)}")
+        logging.exception(f"Failed to load module {module_name} from script file '{py_file_path}'")
         raise e

@@ -554,7 +554,7 @@ class IndexingRunner:
                     qa_documents.append(qa_document)
                 format_documents.extend(qa_documents)
             except Exception as e:
-                logging.exception(e)
+                logging.exception("Failed to format qa document")
 
             all_qa_documents.extend(format_documents)

@@ -102,7 +102,7 @@ class LLMGenerator:
         except InvokeError:
             questions = []
         except Exception as e:
-            logging.exception(e)
+            logging.exception("Failed to generate suggested questions after answer")
             questions = []
 
         return questions

@@ -148,7 +148,7 @@ class LLMGenerator:
             error = str(e)
             error_step = "generate rule config"
         except Exception as e:
-            logging.exception(e)
+            logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}")
             rule_config["error"] = str(e)
 
         rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else ""

@@ -234,7 +234,7 @@ class LLMGenerator:
                 error_step = "generate conversation opener"
 
             except Exception as e:
-                logging.exception(e)
+                logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}")
                 rule_config["error"] = str(e)
 
         rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else ""

@@ -286,7 +286,9 @@ class LLMGenerator:
             error = str(e)
             return {"code": "", "language": code_language, "error": f"Failed to generate code. Error: {error}"}
         except Exception as e:
-            logging.exception(e)
+            logging.exception(
+                f"Failed to invoke LLM model, model: {model_config.get('name')}, language: {code_language}"
+            )
             return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"}
 
     @classmethod

@@ -103,7 +103,7 @@ class AzureRerankModel(RerankModel):
             return RerankResult(model=model, docs=rerank_documents)
 
         except Exception as e:
-            logger.exception(f"Exception in Azure rerank: {e}")
+            logger.exception(f"Failed to invoke rerank model, model: {model}")
             raise
 
     def validate_credentials(self, model: str, credentials: dict) -> None:

@@ -113,7 +113,7 @@ class SageMakerRerankModel(RerankModel):
             return RerankResult(model=model, docs=rerank_documents)
 
         except Exception as e:
-            logger.exception(f"Exception {e}, line : {line}")
+            logger.exception(f"Failed to invoke rerank model, model: {model}")
 
     def validate_credentials(self, model: str, credentials: dict) -> None:
         """

@@ -78,7 +78,7 @@ class SageMakerSpeech2TextModel(Speech2TextModel):
             json_obj = json.loads(json_str)
             asr_text = json_obj["text"]
         except Exception as e:
-            logger.exception(f"failed to invoke speech2text model, {e}")
+            logger.exception(f"failed to invoke speech2text model, model: {model}")
             raise CredentialsValidateFailedError(str(e))
 
         return asr_text

@@ -117,7 +117,7 @@ class SageMakerEmbeddingModel(TextEmbeddingModel):
             return TextEmbeddingResult(embeddings=all_embeddings, usage=usage, model=model)
 
         except Exception as e:
-            logger.exception(f"Exception {e}, line : {line}")
+            logger.exception(f"Failed to invoke text embedding model, model: {model}, line: {line}")
 
     def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int:
         """

@@ -126,6 +126,6 @@ class OutputModeration(BaseModel):
             result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer)
             return result
         except Exception as e:
-            logger.exception("Moderation Output error: %s", e)
+            logger.exception(f"Moderation Output error, app_id: {app_id}")
 
         return None

@@ -711,7 +711,7 @@ class TraceQueueManager:
             trace_task.app_id = self.app_id
             trace_manager_queue.put(trace_task)
         except Exception as e:
-            logging.exception(f"Error adding trace task: {e}")
+            logging.exception(f"Error adding trace task, trace_type {trace_task.trace_type}")
         finally:
             self.start_timer()

@@ -730,7 +730,7 @@ class TraceQueueManager:
             if tasks:
                 self.send_to_celery(tasks)
         except Exception as e:
-            logging.exception(f"Error processing trace tasks: {e}")
+            logging.exception("Error processing trace tasks")
 
     def start_timer(self):
         global trace_manager_timer

@@ -242,7 +242,7 @@ class CouchbaseVector(BaseVector):
         try:
             self._cluster.query(query, named_parameters={"doc_ids": ids}).execute()
         except Exception as e:
-            logger.exception(e)
+            logger.exception(f"Failed to delete documents, ids: {ids}")
 
     def delete_by_document_id(self, document_id: str):
         query = f"""

@@ -79,7 +79,7 @@ class LindormVectorStore(BaseVector):
                 existing_docs = self._client.mget(index=self._collection_name, body={"ids": batch_ids}, _source=False)
                 return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
             except Exception as e:
-                logger.exception(f"Error fetching batch {batch_ids}: {e}")
+                logger.exception(f"Error fetching batch {batch_ids}")
                 return set()
 
         @retry(stop=stop_after_attempt(3), wait=wait_fixed(60))

@@ -96,7 +96,7 @@ class LindormVectorStore(BaseVector):
                 )
                 return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
             except Exception as e:
-                logger.exception(f"Error fetching batch {batch_ids}: {e}")
+                logger.exception(f"Error fetching batch ids: {batch_ids}")
                 return set()
 
         if ids is None:

@@ -177,7 +177,7 @@ class LindormVectorStore(BaseVector):
             else:
                 logger.warning(f"Index '{self._collection_name}' does not exist. No deletion performed.")
         except Exception as e:
-            logger.exception(f"Error occurred while deleting the index: {e}")
+            logger.exception(f"Error occurred while deleting the index: {self._collection_name}")
             raise e
 
     def text_exists(self, id: str) -> bool:

@@ -201,7 +201,7 @@ class LindormVectorStore(BaseVector):
         try:
             response = self._client.search(index=self._collection_name, body=query)
         except Exception as e:
-            logger.exception(f"Error executing search: {e}")
+            logger.exception(f"Error executing vector search, query: {query}")
             raise
 
         docs_and_scores = []

@@ -142,7 +142,7 @@ class MyScaleVector(BaseVector):
                 for r in self._client.query(sql).named_results()
             ]
         except Exception as e:
-            logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
+            logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")  # noqa:TRY401
             return []
 
     def delete(self) -> None:

@@ -158,7 +158,7 @@ class OpenSearchVector(BaseVector):
         try:
             response = self._client.search(index=self._collection_name.lower(), body=query)
         except Exception as e:
-            logger.exception(f"Error executing search: {e}")
+            logger.exception(f"Error executing vector search, query: {query}")
             raise
 
         docs = []

@@ -69,7 +69,7 @@ class CacheEmbedding(Embeddings):
                 except IntegrityError:
                     db.session.rollback()
                 except Exception as e:
-                    logging.exception("Failed transform embedding: %s", e)
+                    logging.exception("Failed transform embedding")
             cache_embeddings = []
             try:
                 for i, embedding in zip(embedding_queue_indices, embedding_queue_embeddings):

@@ -89,7 +89,7 @@ class CacheEmbedding(Embeddings):
                 db.session.rollback()
         except Exception as ex:
             db.session.rollback()
-            logger.exception("Failed to embed documents: %s", ex)
+            logger.exception("Failed to embed documents: %s")
             raise ex
 
         return text_embeddings

@@ -112,7 +112,7 @@ class CacheEmbedding(Embeddings):
             embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist()
         except Exception as ex:
             if dify_config.DEBUG:
-                logging.exception(f"Failed to embed query text: {ex}")
+                logging.exception(f"Failed to embed query text '{text[:10]}...({len(text)} chars)'")
             raise ex
 
         try:

@@ -126,7 +126,7 @@ class CacheEmbedding(Embeddings):
             redis_client.setex(embedding_cache_key, 600, encoded_str)
         except Exception as ex:
             if dify_config.DEBUG:
-                logging.exception("Failed to add embedding to redis %s", ex)
+                logging.exception(f"Failed to add embedding to redis for the text '{text[:10]}...({len(text)} chars)'")
             raise ex
 
         return embedding_results

@@ -229,7 +229,7 @@ class WordExtractor(BaseExtractor):
                         for i in url_pattern.findall(x.text):
                             hyperlinks_url = str(i)
                 except Exception as e:
-                    logger.exception(e)
+                    logger.exception("Failed to parse HYPERLINK xml")
 
         def parse_paragraph(paragraph):
             paragraph_content = []

@@ -159,7 +159,7 @@ class QAIndexProcessor(BaseIndexProcessor):
                     qa_documents.append(qa_document)
                 format_documents.extend(qa_documents)
             except Exception as e:
-                logging.exception(e)
+                logging.exception("Failed to format qa document")
 
             all_qa_documents.extend(format_documents)

@@ -175,7 +175,7 @@ class WorkflowTool(Tool):
 
                         files.append(file_dict)
                     except Exception as e:
-                        logger.exception(e)
+                        logger.exception(f"Failed to transform file {file}")
             else:
                 parameters_result[parameter.name] = tool_parameters.get(parameter.name)

@@ -98,7 +98,7 @@ class ToolFileManager:
             response.raise_for_status()
             blob = response.content
         except Exception as e:
-            logger.exception(f"Failed to download file from {file_url}: {e}")
+            logger.exception(f"Failed to download file from {file_url}")
             raise
 
         mimetype = guess_type(file_url)[0] or "octet/stream"

@@ -388,7 +388,7 @@ class ToolManager:
                 yield provider
 
             except Exception as e:
-                logger.exception(f"load builtin provider {provider} error: {e}")
+                logger.exception(f"load builtin provider {provider}")
                 continue
         # set builtin providers loaded
         cls._builtin_providers_loaded = True

@@ -40,7 +40,7 @@ class ToolFileMessageTransformer:
                     )
                 )
             except Exception as e:
-                logger.exception(e)
+                logger.exception(f"Failed to download image from {url}")
                 result.append(
                     ToolInvokeMessage(
                         type=ToolInvokeMessage.MessageType.TEXT,

@@ -172,7 +172,7 @@ class GraphEngine:
                         "answer"
                     ].strip()
         except Exception as e:
-            logger.exception(f"Graph run failed: {str(e)}")
+            logger.exception("Graph run failed")
             yield GraphRunFailedEvent(error=str(e))
             return

@@ -692,7 +692,7 @@ class GraphEngine:
                 )
                 return
         except Exception as e:
-            logger.exception(f"Node {node_instance.node_data.title} run failed: {str(e)}")
+            logger.exception(f"Node {node_instance.node_data.title} run failed")
             raise e
         finally:
             db.session.close()

@@ -69,7 +69,7 @@ class BaseNode(Generic[GenericNodeData]):
         try:
             result = self._run()
         except Exception as e:
-            logger.exception(f"Node {self.node_id} failed to run: {e}")
+            logger.exception(f"Node {self.node_id} failed to run")
             result = NodeRunResult(
                 status=WorkflowNodeExecutionStatus.FAILED,
                 error=str(e),

@@ -70,7 +70,7 @@ class Storage:
         try:
             self.storage_runner.save(filename, data)
         except Exception as e:
-            logging.exception("Failed to save file: %s", e)
+            logging.exception(f"Failed to save file {filename}")
             raise e
 
     def load(self, filename: str, /, *, stream: bool = False) -> Union[bytes, Generator]:

@@ -80,42 +80,42 @@ class Storage:
             else:
                 return self.load_once(filename)
         except Exception as e:
-            logging.exception("Failed to load file: %s", e)
+            logging.exception(f"Failed to load file {filename}")
             raise e
 
     def load_once(self, filename: str) -> bytes:
         try:
             return self.storage_runner.load_once(filename)
         except Exception as e:
-            logging.exception("Failed to load_once file: %s", e)
+            logging.exception(f"Failed to load_once file {filename}")
             raise e
 
     def load_stream(self, filename: str) -> Generator:
         try:
             return self.storage_runner.load_stream(filename)
         except Exception as e:
-            logging.exception("Failed to load_stream file: %s", e)
+            logging.exception(f"Failed to load_stream file {filename}")
             raise e
 
     def download(self, filename, target_filepath):
         try:
             self.storage_runner.download(filename, target_filepath)
         except Exception as e:
-            logging.exception("Failed to download file: %s", e)
+            logging.exception(f"Failed to download file {filename}")
             raise e
 
     def exists(self, filename):
         try:
             return self.storage_runner.exists(filename)
         except Exception as e:
-            logging.exception("Failed to check file exists: %s", e)
+            logging.exception(f"Failed to check file exists {filename}")
             raise e
 
     def delete(self, filename):
         try:
             return self.storage_runner.delete(filename)
         except Exception as e:
-            logging.exception("Failed to delete file: %s", e)
+            logging.exception(f"Failed to delete file {filename}")
             raise e

@@ -39,13 +39,13 @@ class SMTPClient:
 
                 smtp.sendmail(self._from, mail["to"], msg.as_string())
         except smtplib.SMTPException as e:
-            logging.exception(f"SMTP error occurred: {str(e)}")
+            logging.exception("SMTP error occurred")
             raise
         except TimeoutError as e:
-            logging.exception(f"Timeout occurred while sending email: {str(e)}")
+            logging.exception("Timeout occurred while sending email")
             raise
         except Exception as e:
-            logging.exception(f"Unexpected error occurred while sending email: {str(e)}")
+            logging.exception(f"Unexpected error occurred while sending email to {mail['to']}")
             raise
         finally:
             if smtp:

@@ -679,7 +679,7 @@ class DatasetKeywordTable(db.Model):
                     return json.loads(keyword_table_text.decode("utf-8"), cls=SetDecoder)
             return None
         except Exception as e:
-            logging.exception(str(e))
+            logging.exception(f"Failed to load keyword table from file: {file_key}")
             return None

@@ -35,6 +35,7 @@ select = [
     "S506", # unsafe-yaml-load
     "SIM", # flake8-simplify rules
     "TRY400", # error-instead-of-exception
+    "TRY401", # verbose-log-message
     "UP", # pyupgrade rules
     "W191", # tab-indentation
     "W605", # invalid-escape-sequence

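(For reference, an illustrative sketch of what the two Ruff rules named above flag; the `load_json` helper is hypothetical, not repository code. TRY400, error-instead-of-exception, reports `logging.error` inside an `except` block where `logging.exception` would also capture the traceback; TRY401, verbose-log-message, reports exception objects interpolated into `logging.exception` messages:)

import json
import logging

logger = logging.getLogger(__name__)

def load_json(path: str) -> dict:
    try:
        with open(path) as f:
            return json.load(f)
    except OSError as e:
        logger.error(f"cannot read {path}: {e}")  # TRY400: use logger.exception
        raise
    except ValueError as e:
        logger.exception(f"cannot parse {path}: {e}")  # TRY401: drop the redundant {e}
        raise

def load_json_fixed(path: str) -> dict:
    try:
        with open(path) as f:
            return json.load(f)
    except (OSError, ValueError):
        # Context goes in the message; the exception and its traceback
        # are recorded by exception() automatically.
        logger.exception(f"Failed to load JSON file {path}")
        raise
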
@@ -779,7 +779,7 @@ class RegisterService:
             db.session.query(Tenant).delete()
             db.session.commit()
 
-            logging.exception(f"Setup failed: {e}")
+            logging.exception(f"Setup account failed, email: {email}, name: {name}")
             raise ValueError(f"Setup failed: {e}")
 
     @classmethod

@@ -821,7 +821,7 @@ class RegisterService:
             db.session.rollback()
         except Exception as e:
             db.session.rollback()
-            logging.exception(f"Register failed: {e}")
+            logging.exception("Register failed")
             raise AccountRegisterError(f"Registration failed: {e}") from e
 
         return account

@@ -88,7 +88,7 @@ class AppService:
         except (ProviderTokenNotInitError, LLMBadRequestError):
             model_instance = None
         except Exception as e:
-            logging.exception(e)
+            logging.exception(f"Get default model instance failed, tenant_id: {tenant_id}")
             model_instance = None
 
         if model_instance:

@@ -193,7 +193,7 @@ class ApiToolManageService:
             # try to parse schema, avoid SSRF attack
             ApiToolManageService.parser_api_schema(schema)
         except Exception as e:
-            logger.exception(f"parse api schema error: {str(e)}")
+            logger.exception("parse api schema error")
             raise ValueError("invalid schema, please check the url you provided")
 
         return {"schema": schema}

@@ -183,7 +183,7 @@ class ToolTransformService:
         try:
             username = db_provider.user.name
         except Exception as e:
-            logger.exception(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
+            logger.exception(f"failed to get user name for api provider {db_provider.id}")
         # add provider into providers
         credentials = db_provider.credentials
         result = UserToolProvider(

@@ -38,4 +38,4 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
             click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
         )
     except Exception as e:
-        logging.exception("Annotation deleted index failed:{}".format(str(e)))
+        logging.exception("Annotation deleted index failed")

@@ -60,7 +60,7 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
             click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
         )
     except Exception as e:
-        logging.exception("Annotation batch deleted index failed:{}".format(str(e)))
+        logging.exception("Annotation batch deleted index failed")
         redis_client.setex(disable_app_annotation_job_key, 600, "error")
         disable_app_annotation_error_key = "disable_app_annotation_error_{}".format(str(job_id))
         redis_client.setex(disable_app_annotation_error_key, 600, str(e))

@@ -93,7 +93,7 @@ def enable_annotation_reply_task(
             click.style("App annotations added to index: {} latency: {}".format(app_id, end_at - start_at), fg="green")
         )
     except Exception as e:
-        logging.exception("Annotation batch created index failed:{}".format(str(e)))
+        logging.exception("Annotation batch created index failed")
         redis_client.setex(enable_app_annotation_job_key, 600, "error")
         enable_app_annotation_error_key = "enable_app_annotation_error_{}".format(str(job_id))
         redis_client.setex(enable_app_annotation_error_key, 600, str(e))

@@ -103,5 +103,5 @@ def batch_create_segment_to_index_task(
             click.style("Segment batch created job: {} latency: {}".format(job_id, end_at - start_at), fg="green")
         )
     except Exception as e:
-        logging.exception("Segments batch created index failed:{}".format(str(e)))
+        logging.exception("Segments batch created index failed")
         redis_client.setex(indexing_cache_key, 600, "error")