
feat: add zhipu glm_4v_flash (#11440)

非法操作 committed 7 months ago
Commit 142b4fd699
2 files changed, 56 insertions and 2 deletions
  1. api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_flash.yaml (+52 / -0)
  2. api/core/model_runtime/model_providers/zhipuai/llm/llm.py (+4 / -2)

+ 52 - 0
api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_flash.yaml
(file diff suppressed because it is too large)


+ 4 - 2
api/core/model_runtime/model_providers/zhipuai/llm/llm.py

@@ -144,7 +144,7 @@ class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel):
             if copy_prompt_message.role in {PromptMessageRole.USER, PromptMessageRole.SYSTEM, PromptMessageRole.TOOL}:
                 if isinstance(copy_prompt_message.content, list):
                     # check if model is 'glm-4v'
-                    if model not in {"glm-4v", "glm-4v-plus"}:
+                    if not model.startswith("glm-4v"):
                         # not support list message
                         continue
                     # get image and
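Note on the hunk above: the hard-coded set of vision models is replaced by a prefix check, so the new glm-4v-flash (and any future glm-4v variant) is allowed to carry multimodal list content. A minimal sketch of the difference in behavior; the model names and helper functions below are illustrative, only the two conditions come from the patch:

    # Old vs. new gate for multimodal (list) message content.
    def accepts_list_content_old(model: str) -> bool:
        return model in {"glm-4v", "glm-4v-plus"}

    def accepts_list_content_new(model: str) -> bool:
        return model.startswith("glm-4v")

    for name in ["glm-4v", "glm-4v-plus", "glm-4v-flash", "glm-4-flash"]:
        print(name, accepts_list_content_old(name), accepts_list_content_new(name))
    # glm-4v-flash: False -> True (list content is no longer skipped)
    # glm-4-flash (no "v", text-only): False under both checks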
@@ -188,7 +188,7 @@ class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel):
             else:
                 model_parameters["tools"] = [web_search_params]
 
-        if model in {"glm-4v", "glm-4v-plus"}:
+        if model.startswith("glm-4v"):
             params = self._construct_glm_4v_parameter(model, new_prompt_messages, model_parameters)
         else:
             params = {"model": model, "messages": [], **model_parameters}
@@ -412,6 +412,8 @@ class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel):
         human_prompt = "\n\nHuman:"
         ai_prompt = "\n\nAssistant:"
         content = message.content
+        if isinstance(content, list):
+            content = "".join(c.data for c in content if c.type == PromptMessageContentType.TEXT)
 
         if isinstance(message, UserPromptMessage):
             message_text = f"{human_prompt} {content}"