feat: chatcmpl请求时也忽略空的 system prompt message (#745)

This commit is contained in:
RockChinQ 2024-03-29 17:34:09 +08:00
parent 387dfa39ff
commit 63307633c2
2 changed files with 2 additions and 2 deletions

View File

@@ -37,7 +37,7 @@ class AnthropicMessages(api.LLMAPIRequester):
args["model"] = query.use_model.name if query.use_model.model_name is None else query.use_model.model_name
req_messages = [ # req_messages 仅用于类内,外部同步由 query.messages 进行
- m.dict(exclude_none=True) for m in query.prompt.messages
+ m.dict(exclude_none=True) for m in query.prompt.messages if m.content.strip() != ""
] + [m.dict(exclude_none=True) for m in query.messages]
# 删除所有 role=system & content='' 的消息

View File

@@ -93,7 +93,7 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
pending_tool_calls = []
req_messages = [ # req_messages 仅用于类内,外部同步由 query.messages 进行
- m.dict(exclude_none=True) for m in query.prompt.messages
+ m.dict(exclude_none=True) for m in query.prompt.messages if m.content.strip() != ""
] + [m.dict(exclude_none=True) for m in query.messages]
# req_messages.append({"role": "user", "content": str(query.message_chain)})