perf: 优化控制台输出

This commit is contained in:
RockChinQ 2024-02-16 14:11:22 +08:00
parent ae358dd6d0
commit 06c8773975
5 changed files with 6 additions and 4 deletions

View File

@@ -151,7 +151,7 @@ class Controller:
except Exception as e: except Exception as e:
self.ap.logger.error(f"处理请求时出错 query_id={query.query_id}: {e}") self.ap.logger.error(f"处理请求时出错 query_id={query.query_id}: {e}")
self.ap.logger.debug(f"Traceback: {traceback.format_exc()}") self.ap.logger.debug(f"Traceback: {traceback.format_exc()}")
traceback.print_exc() # traceback.print_exc()
finally: finally:
self.ap.logger.debug(f"Query {query} processed") self.ap.logger.debug(f"Query {query} processed")

View File

@@ -32,6 +32,8 @@ class Processor(stage.PipelineStage):
""" """
message_text = str(query.message_chain).strip() message_text = str(query.message_chain).strip()
self.ap.logger.info(f"处理 {query.launcher_type.value}_{query.launcher_id} 的请求({query.query_id}): {message_text}")
if message_text.startswith('!') or message_text.startswith('!'): if message_text.startswith('!') or message_text.startswith('!'):
return self.cmd_handler.handle(query) return self.cmd_handler.handle(query)
else: else:

View File

@@ -40,7 +40,7 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
elif type(msg) is mirai.Voice: elif type(msg) is mirai.Voice:
msg_list.append(aiocqhttp.MessageSegment.record(msg.path)) msg_list.append(aiocqhttp.MessageSegment.record(msg.path))
elif type(msg) is forward.Forward: elif type(msg) is forward.Forward:
print("aiocqhttp 暂不支持转发消息组件的转换,使用普通消息链发送") # print("aiocqhttp 暂不支持转发消息组件的转换,使用普通消息链发送")
for node in msg.node_list: for node in msg.node_list:
msg_list.extend(AiocqhttpMessageConverter.yiri2target(node.message_chain)[0]) msg_list.extend(AiocqhttpMessageConverter.yiri2target(node.message_chain)[0])

View File

@@ -24,7 +24,7 @@ class ModelManager:
for model in self.model_list: for model in self.model_list:
if model.name == name: if model.name == name:
return model return model
raise ValueError(f"Model {name} not found") raise ValueError(f"不支持模型: {name} , 请检查配置文件")
async def initialize(self): async def initialize(self):
openai_chat_completion = chatcmpl.OpenAIChatCompletion(self.ap) openai_chat_completion = chatcmpl.OpenAIChatCompletion(self.ap)

View File

@@ -17,7 +17,7 @@ class Tiktoken(tokenizer.LLMTokenizer):
try: try:
encoding = tiktoken.encoding_for_model(model.name) encoding = tiktoken.encoding_for_model(model.name)
except KeyError: except KeyError:
print("Warning: model not found. Using cl100k_base encoding.") # print("Warning: model not found. Using cl100k_base encoding.")
encoding = tiktoken.get_encoding("cl100k_base") encoding = tiktoken.get_encoding("cl100k_base")
num_tokens = 0 num_tokens = 0