
feat:add deepseek r1 think display for ollama provider (#13272)

呆萌闷油瓶 2 months ago
parent
commit
0d13aee15c
1 changed file with 9 additions and 0 deletions
  1. api/core/model_runtime/model_providers/ollama/llm/llm.py  +9 -0

api/core/model_runtime/model_providers/ollama/llm/llm.py  +9 -0

@@ -314,6 +314,7 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
         """
         full_text = ""
         chunk_index = 0
+        is_reasoning_started = False
 
         def create_final_llm_result_chunk(
             index: int, message: AssistantPromptMessage, finish_reason: str
@@ -367,6 +368,14 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
 
                 # transform assistant message to prompt message
                 text = chunk_json["response"]
+            if "<think>" in text:
+                is_reasoning_started = True
+                text = text.replace("<think>", "> 💭 ")
+            elif "</think>" in text:
+                is_reasoning_started = False
+                text = text.replace("</think>", "") + "\n\n"
+            elif is_reasoning_started:
+                text = text.replace("\n", "\n> ")
 
             assistant_prompt_message = AssistantPromptMessage(content=text)
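
For reference, a minimal standalone sketch of the tag handling added in this diff. The helper name format_reasoning_chunks and the sample chunk list are illustrative only and are not part of the Dify codebase; the committed change applies the same three branches to each streamed chunk_json["response"] inside the streaming loop shown above, so that a DeepSeek R1 <think>…</think> span renders as a Markdown "> 💭" blockquote ahead of the final answer.

def format_reasoning_chunks(chunks):
    """Rewrite streamed text chunks so <think>...</think> reasoning
    shows up as Markdown blockquote lines (mirrors the diff's logic)."""
    is_reasoning_started = False
    for text in chunks:
        if "<think>" in text:
            # Opening tag: start quoting and prefix with the thought emoji.
            is_reasoning_started = True
            text = text.replace("<think>", "> 💭 ")
        elif "</think>" in text:
            # Closing tag: stop quoting and separate reasoning from the answer.
            is_reasoning_started = False
            text = text.replace("</think>", "") + "\n\n"
        elif is_reasoning_started:
            # Inside the reasoning span: keep newly started lines quoted too.
            text = text.replace("\n", "\n> ")
        yield text


if __name__ == "__main__":
    # Hypothetical stream: reasoning chunks followed by the final answer.
    sample = ["<think>", "Check the sum.\n", "Two plus two is four.", "</think>", "The answer is 4."]
    print("".join(format_reasoning_chunks(sample)))

Running the sketch prints the reasoning as a quoted block ("> 💭 Check the sum. / > Two plus two is four.") followed by a blank line and the final answer, which is the display behavior this commit adds for the Ollama provider.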