From 7a19dd503d02f9324b4abbeb7bc57b4fc6427542 Mon Sep 17 00:00:00 2001
From: Huoyuuu <86390123+Huoyuuu@users.noreply.github.com>
Date: Wed, 19 Jun 2024 17:26:06 +0800
Subject: [PATCH 1/3] fix: ensure content is string in chatcmpl call method

fix: ensure content is string in chatcmpl call method

- Ensure user message content is a string instead of an array
- Updated `call` method in `chatcmpl.py` to guarantee content is a string
- Resolves compatibility issue with the yi-large model
---
 pkg/provider/modelmgr/apis/chatcmpl.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/pkg/provider/modelmgr/apis/chatcmpl.py b/pkg/provider/modelmgr/apis/chatcmpl.py
index 028b208..198a0fa 100644
--- a/pkg/provider/modelmgr/apis/chatcmpl.py
+++ b/pkg/provider/modelmgr/apis/chatcmpl.py
@@ -102,9 +102,14 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
         messages: typing.List[llm_entities.Message],
         funcs: typing.List[tools_entities.LLMFunction] = None,
     ) -> llm_entities.Message:
-        req_messages = [  # req_messages is only used within this class; external sync is handled via query.messages
-            m.dict(exclude_none=True) for m in messages
-        ]
+        req_messages = []
+        for m in messages:
+            msg_dict = m.dict(exclude_none=True)
+            if isinstance(msg_dict.get("content"), list):
+                # make sure content is a string
+                msg_dict["content"] = "".join(
+                    [part["text"] for part in msg_dict["content"]])
+            req_messages.append(msg_dict)
 
         try:
             return await self._closure(req_messages, model, funcs)

From 5092a8273928c864d12cd5310a8cdf970249de7e Mon Sep 17 00:00:00 2001
From: Huoyuuu <86390123+Huoyuuu@users.noreply.github.com>
Date: Wed, 19 Jun 2024 19:13:00 +0800
Subject: [PATCH 2/3] Update chatcmpl.py

---
 pkg/provider/modelmgr/apis/chatcmpl.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/pkg/provider/modelmgr/apis/chatcmpl.py b/pkg/provider/modelmgr/apis/chatcmpl.py
index 198a0fa..cc33ad0 100644
--- a/pkg/provider/modelmgr/apis/chatcmpl.py
+++ b/pkg/provider/modelmgr/apis/chatcmpl.py
@@ -102,13 +102,15 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
         messages: typing.List[llm_entities.Message],
         funcs: typing.List[tools_entities.LLMFunction] = None,
     ) -> llm_entities.Message:
-        req_messages = []
+        req_messages = []  # req_messages is only used within this class; external sync is handled via query.messages
         for m in messages:
             msg_dict = m.dict(exclude_none=True)
-            if isinstance(msg_dict.get("content"), list):
-                # make sure content is a string
-                msg_dict["content"] = "".join(
-                    [part["text"] for part in msg_dict["content"]])
+            content = msg_dict.get("content")
+            if isinstance(content, list):
+                # check whether every part of the content list is text
+                if all(isinstance(part, dict) and part.get("type") == "text" for part in content):
+                    # merge all text parts into a single string
+                    msg_dict["content"] = "".join(part["text"] for part in content)
             req_messages.append(msg_dict)
 
         try:

From 39ce5646f6e0e1c2298a0b82209826a20a22797a Mon Sep 17 00:00:00 2001
From: RockChinQ <1010553892@qq.com>
Date: Mon, 24 Jun 2024 17:04:50 +0800
Subject: [PATCH 3/3] perf: join content elements with a newline separator

---
 pkg/provider/modelmgr/apis/chatcmpl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg/provider/modelmgr/apis/chatcmpl.py b/pkg/provider/modelmgr/apis/chatcmpl.py
index cc33ad0..3f1cfb3 100644
--- a/pkg/provider/modelmgr/apis/chatcmpl.py
+++ b/pkg/provider/modelmgr/apis/chatcmpl.py
@@ -110,7 +110,7 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
                 # check whether every part of the content list is text
                 if all(isinstance(part, dict) and part.get("type") == "text" for part in content):
                     # merge all text parts into a single string
-                    msg_dict["content"] = "".join(part["text"] for part in content)
+                    msg_dict["content"] = "\n".join(part["text"] for part in content)
             req_messages.append(msg_dict)
 
         try:
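
For reference, the normalization this patch series converges on can be exercised in isolation. The sketch below is not the project's code: the helper name `flatten_text_content` and the example message are hypothetical, and it simply mirrors the guard-and-join logic from the hunks above, collapsing OpenAI-style multi-part text content into a single newline-joined string for providers (such as yi-large) that only accept string content.

from typing import Any, Dict


def flatten_text_content(msg_dict: Dict[str, Any]) -> Dict[str, Any]:
    """Collapse a list of text parts in msg_dict["content"] into one string."""
    content = msg_dict.get("content")
    # only touch content that is a list where every part is a text part;
    # anything else (plain strings, image_url parts, etc.) is left untouched
    if isinstance(content, list) and all(
        isinstance(part, dict) and part.get("type") == "text" for part in content
    ):
        # join with a newline, as PATCH 3/3 does
        msg_dict["content"] = "\n".join(part["text"] for part in content)
    return msg_dict


if __name__ == "__main__":
    # hypothetical example message with two text parts
    msg = {
        "role": "user",
        "content": [
            {"type": "text", "text": "hello"},
            {"type": "text", "text": "world"},
        ],
    }
    print(flatten_text_content(msg))  # {'role': 'user', 'content': 'hello\nworld'}

Joining with "\n" rather than "" (the change in PATCH 3/3) presumably keeps segments that the user sent as separate parts visually separated in the flattened string.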