feat: also ignore empty system prompt messages in chatcmpl requests (#745)
parent 387dfa39ff
commit 63307633c2
@@ -37,7 +37,7 @@ class AnthropicMessages(api.LLMAPIRequester):
         args["model"] = query.use_model.name if query.use_model.model_name is None else query.use_model.model_name
 
         req_messages = [  # req_messages is only used inside this class; external state is synced through query.messages
-            m.dict(exclude_none=True) for m in query.prompt.messages
+            m.dict(exclude_none=True) for m in query.prompt.messages if m.content.strip() != ""
         ] + [m.dict(exclude_none=True) for m in query.messages]
 
         # remove all messages with role=system & content=''
@@ -93,7 +93,7 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
         pending_tool_calls = []
 
         req_messages = [  # req_messages is only used inside this class; external state is synced through query.messages
-            m.dict(exclude_none=True) for m in query.prompt.messages
+            m.dict(exclude_none=True) for m in query.prompt.messages if m.content.strip() != ""
         ] + [m.dict(exclude_none=True) for m in query.messages]
 
         # req_messages.append({"role": "user", "content": str(query.message_chain)})
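For illustration, a minimal runnable sketch of what the added `if m.content.strip() != ""` guard does: prompt messages whose content is empty or whitespace-only are dropped before the request payload is assembled. The `Message` model and the sample data below are illustrative stand-ins, not the project's actual classes; pydantic is used only to mirror the `.dict(exclude_none=True)` call seen in the diff.

import pydantic


class Message(pydantic.BaseModel):
    # Hypothetical stand-in for the project's prompt/query message model.
    role: str
    content: str


prompt_messages = [
    Message(role="system", content=""),             # empty system prompt -> ignored
    Message(role="system", content="Be concise."),
]
query_messages = [
    Message(role="user", content="Hello"),
]

# Same shape as the changed line in both requesters: filter out empty prompt
# messages, then append the conversation messages.
req_messages = [
    m.dict(exclude_none=True) for m in prompt_messages if m.content.strip() != ""
] + [m.dict(exclude_none=True) for m in query_messages]

print(req_messages)
# [{'role': 'system', 'content': 'Be concise.'}, {'role': 'user', 'content': 'Hello'}]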