Mirror of https://github.com/RockChinQ/QChatGPT.git (synced 2024-11-16 11:42:44 +08:00)
perf: 优化控制台输出 (optimize console output)
Commit 06c8773975 (parent ae358dd6d0)

@@ -151,7 +151,7 @@ class Controller:
         except Exception as e:
             self.ap.logger.error(f"处理请求时出错 query_id={query.query_id}: {e}")
             self.ap.logger.debug(f"Traceback: {traceback.format_exc()}")
-            traceback.print_exc()
+            # traceback.print_exc()
         finally:
             self.ap.logger.debug(f"Query {query} processed")
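
The only change in this hunk is commenting out traceback.print_exc(), so the full traceback no longer goes straight to stderr; it remains available through the debug-level logger call above it. A minimal sketch of the same pattern using the standard logging module (standing in for the project's ap.logger, whose exact API is not shown here):

import logging
import traceback

logging.basicConfig(level=logging.INFO)  # raise to DEBUG to see full tracebacks
logger = logging.getLogger("controller")


def process(query_id: str) -> None:
    try:
        raise RuntimeError("simulated failure")  # stand-in for the real pipeline work
    except Exception as e:
        # One concise line at ERROR level for the console output...
        logger.error("处理请求时出错 query_id=%s: %s", query_id, e)
        # ...and the full traceback only at DEBUG level, instead of
        # traceback.print_exc(), which always writes to stderr.
        logger.debug("Traceback: %s", traceback.format_exc())
    finally:
        logger.debug("Query %s processed", query_id)


process("demo-1")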

@@ -32,6 +32,8 @@ class Processor(stage.PipelineStage):
         """
         message_text = str(query.message_chain).strip()

+        self.ap.logger.info(f"处理 {query.launcher_type.value}_{query.launcher_id} 的请求({query.query_id}): {message_text}")
+
         if message_text.startswith('!') or message_text.startswith('!'):
             return self.cmd_handler.handle(query)
         else:
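
The added logger.info call records the launcher, query id, and message text before dispatch; the branch below it then routes command messages by prefix, accepting both the ASCII '!' and the full-width '!' produced by Chinese input methods. A small standalone sketch of that prefix check (the names here are illustrative, not the project's API):

COMMAND_PREFIXES = ("!", "!")  # ASCII and full-width exclamation marks


def is_command(message_text: str) -> bool:
    """Return True if the stripped message should be routed to the command handler."""
    return message_text.strip().startswith(COMMAND_PREFIXES)


print(is_command("!help"))   # True
print(is_command("!帮助"))   # True, full-width prefix
print(is_command("你好"))    # False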

@@ -40,7 +40,7 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
             elif type(msg) is mirai.Voice:
                 msg_list.append(aiocqhttp.MessageSegment.record(msg.path))
             elif type(msg) is forward.Forward:
-                print("aiocqhttp 暂不支持转发消息组件的转换,使用普通消息链发送")
+                # print("aiocqhttp 暂不支持转发消息组件的转换,使用普通消息链发送")

                 for node in msg.node_list:
                     msg_list.extend(AiocqhttpMessageConverter.yiri2target(node.message_chain)[0])
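
Commenting out the bare print() only silences the console; the fallback itself, which flattens each forward node's message chain into the ordinary outgoing segment list, is unchanged. A hypothetical sketch of that flattening step with the warning routed through a logger instead, using simplified stand-in types rather than the real mirai/aiocqhttp classes:

import logging
from dataclasses import dataclass, field

logger = logging.getLogger("aiocqhttp-adapter")


@dataclass
class Node:
    """Stand-in for a forward-message node: just carries a list of plain segments."""
    segments: list = field(default_factory=list)


def flatten_forward(nodes: list) -> list:
    """Flatten every node's segments into one ordinary message list."""
    logger.warning("aiocqhttp 暂不支持转发消息组件的转换,使用普通消息链发送")
    flat = []
    for node in nodes:
        flat.extend(node.segments)
    return flat


print(flatten_forward([Node(["hello"]), Node(["world"])]))  # ['hello', 'world']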

@@ -24,7 +24,7 @@ class ModelManager:
         for model in self.model_list:
             if model.name == name:
                 return model
-        raise ValueError(f"Model {name} not found")
+        raise ValueError(f"不支持模型: {name} , 请检查配置文件")

     async def initialize(self):
         openai_chat_completion = chatcmpl.OpenAIChatCompletion(self.ap)
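
Only the error message changes here, from a terse English string to a Chinese hint pointing the user at the configuration file; the lookup is still a linear scan over the registered models. A minimal standalone sketch of the same lookup (ModelInfo and ModelRegistry are placeholder names, not the project's classes):

from dataclasses import dataclass


@dataclass
class ModelInfo:
    name: str


class ModelRegistry:
    def __init__(self, models: list):
        self.model_list = models

    def get_model_by_name(self, name: str) -> ModelInfo:
        for model in self.model_list:
            if model.name == name:
                return model
        # The message now explains the likely fix (check the config file)
        # instead of only stating that the model was not found.
        raise ValueError(f"不支持模型: {name} , 请检查配置文件")


registry = ModelRegistry([ModelInfo("gpt-3.5-turbo")])
print(registry.get_model_by_name("gpt-3.5-turbo").name)  # gpt-3.5-turbo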

@@ -17,7 +17,7 @@ class Tiktoken(tokenizer.LLMTokenizer):
         try:
             encoding = tiktoken.encoding_for_model(model.name)
         except KeyError:
-            print("Warning: model not found. Using cl100k_base encoding.")
+            # print("Warning: model not found. Using cl100k_base encoding.")
             encoding = tiktoken.get_encoding("cl100k_base")

         num_tokens = 0
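
tiktoken.encoding_for_model() raises KeyError for model names it does not know, and the code already falls back to the generic cl100k_base encoding; the change just removes the print() so the fallback no longer spams the console. A short sketch of that fallback, assuming the tiktoken package is installed and reporting the fallback at debug level instead:

import logging

import tiktoken

logger = logging.getLogger("tokenizer")


def count_tokens(text: str, model_name: str) -> int:
    try:
        encoding = tiktoken.encoding_for_model(model_name)
    except KeyError:
        # Unknown model name: fall back to the generic cl100k_base encoding,
        # reporting it through the logger rather than a bare print().
        logger.debug("model %s not found, using cl100k_base encoding", model_name)
        encoding = tiktoken.get_encoding("cl100k_base")
    return len(encoding.encode(text))


print(count_tokens("hello world", "gpt-3.5-turbo"))
print(count_tokens("hello world", "some-custom-model"))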