
feat: llm text stream support for workflow app (#3798)

Co-authored-by: JzoNg <jzongcode@gmail.com>
takatost, 1 year ago
Parent
Current commit
ff67a6d338
27 changed files, with 550 additions and 59 deletions
  1. api/core/app/apps/advanced_chat/generate_task_pipeline.py (+13 -16)
  2. api/core/app/apps/workflow/generate_task_pipeline.py (+156 -1)
  3. api/core/app/apps/workflow/workflow_event_trigger_callback.py (+10 -1)
  4. api/core/app/entities/task_entities.py (+13 -3)
  5. api/core/workflow/nodes/end/end_node.py (+43 -0)
  6. web/app/components/app/text-generate/item/index.tsx (+11 -15)
  7. web/app/components/app/text-generate/item/result-tab.tsx (+74 -0)
  8. web/app/components/base/chat/types.ts (+1 -0)
  9. web/app/components/base/icons/assets/vender/line/editor/image-indent-left.svg (+5 -0)
  10. web/app/components/base/icons/src/vender/line/editor/ImageIndentLeft.json (+39 -0)
  11. web/app/components/base/icons/src/vender/line/editor/ImageIndentLeft.tsx (+16 -0)
  12. web/app/components/base/icons/src/vender/line/editor/index.ts (+1 -0)
  13. web/app/components/share/text-generation/result/index.tsx (+14 -3)
  14. web/app/components/workflow/hooks/use-workflow-run.ts (+22 -0)
  15. web/app/components/workflow/panel/workflow-preview.tsx (+9 -16)
  16. web/app/components/workflow/run/result-text.tsx (+56 -0)
  17. web/app/components/workflow/store.ts (+7 -2)
  18. web/i18n/de-DE/run-log.ts (+6 -0)
  19. web/i18n/en-US/run-log.ts (+6 -0)
  20. web/i18n/fr-FR/run-log.ts (+6 -0)
  21. web/i18n/ja-JP/run-log.ts (+6 -0)
  22. web/i18n/pt-BR/run-log.ts (+6 -0)
  23. web/i18n/uk-UA/run-log.ts (+6 -0)
  24. web/i18n/vi-VN/run-log.ts (+6 -0)
  25. web/i18n/zh-Hans/run-log.ts (+6 -0)
  26. web/i18n/zh-Hant/run-log.ts (+6 -0)
  27. web/service/share.ts (+6 -2)

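For orientation before the per-file diffs: the workflow app's SSE stream now carries incremental text alongside the existing node events, mirroring what chatflow already did. A hedged sketch of the two new payload shapes, inferred from `TextChunkStreamResponse` / `TextReplaceStreamResponse` on the backend and the `onTextChunk` / `onTextReplace` handlers on the web side; the literal event names are assumptions, not shown in this diff.

```python
# Hedged sketch of the new workflow streaming payloads
# (field names taken from the diff, event names assumed).
text_chunk_event = {
    "event": "text_chunk",      # assumed name; emitted per LLM delta
    "task_id": "<task id>",
    "data": {"text": "partial output ..."},
}

text_replace_event = {
    "event": "text_replace",    # assumed name; e.g. when output moderation rewrites the answer
    "task_id": "<task id>",
    "data": {"text": "full replacement text"},
}
```
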
+ 13 - 16
api/core/app/apps/advanced_chat/generate_task_pipeline.py

@@ -28,9 +28,9 @@ from core.app.entities.task_entities import (
     AdvancedChatTaskState,
     ChatbotAppBlockingResponse,
     ChatbotAppStreamResponse,
+    ChatflowStreamGenerateRoute,
     ErrorStreamResponse,
     MessageEndStreamResponse,
-    StreamGenerateRoute,
     StreamResponse,
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
@@ -343,7 +343,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
             **extras
         )
 
-    def _get_stream_generate_routes(self) -> dict[str, StreamGenerateRoute]:
+    def _get_stream_generate_routes(self) -> dict[str, ChatflowStreamGenerateRoute]:
         """
         Get stream generate routes.
         :return:
@@ -366,7 +366,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
                 continue
 
             for start_node_id in start_node_ids:
-                stream_generate_routes[start_node_id] = StreamGenerateRoute(
+                stream_generate_routes[start_node_id] = ChatflowStreamGenerateRoute(
                     answer_node_id=answer_node_id,
                     generate_route=generate_route
                 )
@@ -430,15 +430,14 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
             for route_chunk in route_chunks:
                 if route_chunk.type == 'text':
                     route_chunk = cast(TextGenerateRouteChunk, route_chunk)
-                    for token in route_chunk.text:
-                        # handle output moderation chunk
-                        should_direct_answer = self._handle_output_moderation_chunk(token)
-                        if should_direct_answer:
-                            continue
-
-                        self._task_state.answer += token
-                        yield self._message_to_stream_response(token, self._message.id)
-                        time.sleep(0.01)
+
+                    # handle output moderation chunk
+                    should_direct_answer = self._handle_output_moderation_chunk(route_chunk.text)
+                    if should_direct_answer:
+                        continue
+
+                    self._task_state.answer += route_chunk.text
+                    yield self._message_to_stream_response(route_chunk.text, self._message.id)
                 else:
                     break
 
@@ -463,10 +462,8 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
         for route_chunk in route_chunks:
             if route_chunk.type == 'text':
                 route_chunk = cast(TextGenerateRouteChunk, route_chunk)
-                for token in route_chunk.text:
-                    self._task_state.answer += token
-                    yield self._message_to_stream_response(token, self._message.id)
-                    time.sleep(0.01)
+                self._task_state.answer += route_chunk.text
+                yield self._message_to_stream_response(route_chunk.text, self._message.id)
             else:
                 route_chunk = cast(VarGenerateRouteChunk, route_chunk)
                 value_selector = route_chunk.value_selector

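The advanced-chat change above replaces the per-character token loop (throttled with `time.sleep(0.01)`) with a single yield per text route chunk, so large chunks stream without artificial pacing. A minimal standalone sketch of the before/after behaviour; `emit` and `chunks` are hypothetical stand-ins for `_message_to_stream_response` and the answer node's route chunks.

```python
import time

chunks = ["Hello, ", "world!"]        # stand-in for TextGenerateRouteChunk.text values

def emit(text: str) -> None:          # stand-in for _message_to_stream_response(...)
    print(text, end="", flush=True)

def stream_before() -> str:
    """Old behaviour: one event per character, throttled by sleep()."""
    answer = ""
    for chunk in chunks:
        for token in chunk:
            answer += token
            emit(token)
            time.sleep(0.01)
    return answer

def stream_after() -> str:
    """New behaviour: the whole chunk is appended and emitted at once."""
    answer = ""
    for chunk in chunks:
        answer += chunk
        emit(chunk)
    return answer
```
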
+ 156 - 1
api/core/app/apps/workflow/generate_task_pipeline.py

@@ -28,11 +28,13 @@ from core.app.entities.task_entities import (
     WorkflowAppBlockingResponse,
     WorkflowAppStreamResponse,
     WorkflowFinishStreamResponse,
+    WorkflowStreamGenerateNodes,
     WorkflowTaskState,
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
 from core.app.task_pipeline.workflow_cycle_manage import WorkflowCycleManage
-from core.workflow.entities.node_entities import SystemVariable
+from core.workflow.entities.node_entities import NodeType, SystemVariable
+from core.workflow.nodes.end.end_node import EndNode
 from extensions.ext_database import db
 from models.account import Account
 from models.model import EndUser
@@ -40,6 +42,7 @@ from models.workflow import (
     Workflow,
     WorkflowAppLog,
     WorkflowAppLogCreatedFrom,
+    WorkflowNodeExecution,
     WorkflowRun,
 )
 
@@ -83,6 +86,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
         }
 
         self._task_state = WorkflowTaskState()
+        self._stream_generate_nodes = self._get_stream_generate_nodes()
 
     def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
         """
@@ -167,6 +171,14 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 )
             elif isinstance(event, QueueNodeStartedEvent):
                 workflow_node_execution = self._handle_node_start(event)
+
+                # search stream_generate_routes if node id is answer start at node
+                if not self._task_state.current_stream_generate_state and event.node_id in self._stream_generate_nodes:
+                    self._task_state.current_stream_generate_state = self._stream_generate_nodes[event.node_id]
+
+                    # generate stream outputs when node started
+                    yield from self._generate_stream_outputs_when_node_started()
+
                 yield self._workflow_node_start_to_stream_response(
                     event=event,
                     task_id=self._application_generate_entity.task_id,
@@ -174,6 +186,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 )
             elif isinstance(event, QueueNodeSucceededEvent | QueueNodeFailedEvent):
                 workflow_node_execution = self._handle_node_finished(event)
+
                 yield self._workflow_node_finish_to_stream_response(
                     task_id=self._application_generate_entity.task_id,
                     workflow_node_execution=workflow_node_execution
@@ -193,6 +206,11 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 if delta_text is None:
                     continue
 
+                if not self._is_stream_out_support(
+                        event=event
+                ):
+                    continue
+
                 self._task_state.answer += delta_text
                 yield self._text_chunk_to_stream_response(delta_text)
             elif isinstance(event, QueueMessageReplaceEvent):
@@ -254,3 +272,140 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
             task_id=self._application_generate_entity.task_id,
             text=TextReplaceStreamResponse.Data(text=text)
         )
+
+    def _get_stream_generate_nodes(self) -> dict[str, WorkflowStreamGenerateNodes]:
+        """
+        Get stream generate nodes.
+        :return:
+        """
+        # find all answer nodes
+        graph = self._workflow.graph_dict
+        end_node_configs = [
+            node for node in graph['nodes']
+            if node.get('data', {}).get('type') == NodeType.END.value
+        ]
+
+        # parse stream output node value selectors of end nodes
+        stream_generate_routes = {}
+        for node_config in end_node_configs:
+            # get generate route for stream output
+            end_node_id = node_config['id']
+            generate_nodes = EndNode.extract_generate_nodes(graph, node_config)
+            start_node_ids = self._get_end_start_at_node_ids(graph, end_node_id)
+            if not start_node_ids:
+                continue
+
+            for start_node_id in start_node_ids:
+                stream_generate_routes[start_node_id] = WorkflowStreamGenerateNodes(
+                    end_node_id=end_node_id,
+                    stream_node_ids=generate_nodes
+                )
+
+        return stream_generate_routes
+
+    def _get_end_start_at_node_ids(self, graph: dict, target_node_id: str) \
+            -> list[str]:
+        """
+        Get end start at node id.
+        :param graph: graph
+        :param target_node_id: target node ID
+        :return:
+        """
+        nodes = graph.get('nodes')
+        edges = graph.get('edges')
+
+        # fetch all ingoing edges from source node
+        ingoing_edges = []
+        for edge in edges:
+            if edge.get('target') == target_node_id:
+                ingoing_edges.append(edge)
+
+        if not ingoing_edges:
+            return []
+
+        start_node_ids = []
+        for ingoing_edge in ingoing_edges:
+            source_node_id = ingoing_edge.get('source')
+            source_node = next((node for node in nodes if node.get('id') == source_node_id), None)
+            if not source_node:
+                continue
+
+            node_type = source_node.get('data', {}).get('type')
+            if node_type in [
+                NodeType.IF_ELSE.value,
+                NodeType.QUESTION_CLASSIFIER.value
+            ]:
+                start_node_id = target_node_id
+                start_node_ids.append(start_node_id)
+            elif node_type == NodeType.START.value:
+                start_node_id = source_node_id
+                start_node_ids.append(start_node_id)
+            else:
+                sub_start_node_ids = self._get_end_start_at_node_ids(graph, source_node_id)
+                if sub_start_node_ids:
+                    start_node_ids.extend(sub_start_node_ids)
+
+        return start_node_ids
+
+    def _generate_stream_outputs_when_node_started(self) -> Generator:
+        """
+        Generate stream outputs.
+        :return:
+        """
+        if self._task_state.current_stream_generate_state:
+            stream_node_ids = self._task_state.current_stream_generate_state.stream_node_ids
+
+            for node_id, node_execution_info in self._task_state.ran_node_execution_infos.items():
+                if node_id not in stream_node_ids:
+                    continue
+
+                node_execution_info = self._task_state.ran_node_execution_infos[node_id]
+
+                # get chunk node execution
+                route_chunk_node_execution = db.session.query(WorkflowNodeExecution).filter(
+                    WorkflowNodeExecution.id == node_execution_info.workflow_node_execution_id).first()
+
+                if not route_chunk_node_execution:
+                    continue
+
+                outputs = route_chunk_node_execution.outputs_dict
+
+                if not outputs:
+                    continue
+
+                # get value from outputs
+                text = outputs.get('text')
+
+                if text:
+                    self._task_state.answer += text
+                    yield self._text_chunk_to_stream_response(text)
+
+    def _is_stream_out_support(self, event: QueueTextChunkEvent) -> bool:
+        """
+        Is stream out support
+        :param event: queue text chunk event
+        :return:
+        """
+        if not event.metadata:
+            return False
+
+        if 'node_id' not in event.metadata:
+            return False
+
+        node_id = event.metadata.get('node_id')
+        node_type = event.metadata.get('node_type')
+        stream_output_value_selector = event.metadata.get('value_selector')
+        if not stream_output_value_selector:
+            return False
+
+        if not self._task_state.current_stream_generate_state:
+            return False
+
+        if node_id not in self._task_state.current_stream_generate_state.stream_node_ids:
+            return False
+
+        if node_type != NodeType.LLM:
+            # only LLM support chunk stream output
+            return False
+
+        return True

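Two helpers above do the graph work: `_get_end_start_at_node_ids` walks edges backwards from each End node (a branching source such as if/else or question-classifier makes the End node itself the trigger, a Start node terminates the walk, anything else recurses upstream), and `_is_stream_out_support` then gates chunk events to LLM nodes referenced by that End node. A simplified standalone sketch of the backward walk on a toy graph; the node/edge dict shapes and type strings are assumed to match the stored `graph_dict`.

```python
BRANCH_TYPES = {"if-else", "question-classifier"}   # assumed NodeType string values

def end_start_at_node_ids(graph: dict, target_node_id: str) -> list[str]:
    """Return the node ids whose start should trigger streaming for target_node_id."""
    nodes = {node["id"]: node for node in graph["nodes"]}
    ingoing = [edge for edge in graph["edges"] if edge["target"] == target_node_id]

    start_node_ids: list[str] = []
    for edge in ingoing:
        source = nodes.get(edge["source"])
        if not source:
            continue
        node_type = source["data"]["type"]
        if node_type in BRANCH_TYPES:
            # the branch outcome is only known at runtime, so stream from the target itself
            start_node_ids.append(target_node_id)
        elif node_type == "start":
            start_node_ids.append(source["id"])
        else:
            start_node_ids.extend(end_start_at_node_ids(graph, source["id"]))
    return start_node_ids

toy_graph = {
    "nodes": [
        {"id": "start", "data": {"type": "start"}},
        {"id": "llm", "data": {"type": "llm"}},
        {"id": "end", "data": {"type": "end"}},
    ],
    "edges": [
        {"source": "start", "target": "llm"},
        {"source": "llm", "target": "end"},
    ],
}

assert end_start_at_node_ids(toy_graph, "end") == ["start"]
```
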
+ 10 - 1
api/core/app/apps/workflow/workflow_event_trigger_callback.py

@@ -6,6 +6,7 @@ from core.app.entities.queue_entities import (
     QueueNodeFailedEvent,
     QueueNodeStartedEvent,
     QueueNodeSucceededEvent,
+    QueueTextChunkEvent,
     QueueWorkflowFailedEvent,
     QueueWorkflowStartedEvent,
     QueueWorkflowSucceededEvent,
@@ -119,7 +120,15 @@ class WorkflowEventTriggerCallback(BaseWorkflowCallback):
         """
         Publish text chunk
         """
-        pass
+        self._queue_manager.publish(
+            QueueTextChunkEvent(
+                text=text,
+                metadata={
+                    "node_id": node_id,
+                    **metadata
+                }
+            ), PublishFrom.APPLICATION_MANAGER
+        )
 
     def on_event(self, event: AppQueueEvent) -> None:
         """

+ 13 - 3
api/core/app/entities/task_entities.py

@@ -9,9 +9,17 @@ from core.workflow.entities.node_entities import NodeType
 from core.workflow.nodes.answer.entities import GenerateRouteChunk
 
 
-class StreamGenerateRoute(BaseModel):
+class WorkflowStreamGenerateNodes(BaseModel):
     """
-    StreamGenerateRoute entity
+    WorkflowStreamGenerateNodes entity
+    """
+    end_node_id: str
+    stream_node_ids: list[str]
+
+
+class ChatflowStreamGenerateRoute(BaseModel):
+    """
+    ChatflowStreamGenerateRoute entity
     """
     answer_node_id: str
     generate_route: list[GenerateRouteChunk]
@@ -55,6 +63,8 @@ class WorkflowTaskState(TaskState):
     ran_node_execution_infos: dict[str, NodeExecutionInfo] = {}
     latest_node_execution_info: Optional[NodeExecutionInfo] = None
 
+    current_stream_generate_state: Optional[WorkflowStreamGenerateNodes] = None
+
 
 class AdvancedChatTaskState(WorkflowTaskState):
     """
@@ -62,7 +72,7 @@ class AdvancedChatTaskState(WorkflowTaskState):
     """
     usage: LLMUsage
 
-    current_stream_generate_state: Optional[StreamGenerateRoute] = None
+    current_stream_generate_state: Optional[ChatflowStreamGenerateRoute] = None
 
 
 class StreamEvent(Enum):

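The renamed entities keep the two streaming modes apart: chatflow streams along an Answer node's generate route, while workflow streams the `text` of LLM nodes feeding an End node. A hedged sketch of the mapping `_get_stream_generate_nodes` builds for a linear Start -> LLM -> End graph (the ids are illustrative):

```python
from pydantic import BaseModel


class WorkflowStreamGenerateNodes(BaseModel):
    end_node_id: str
    stream_node_ids: list[str]


# Keyed by the node whose start triggers streaming (the Start node in a linear graph);
# the value records the End node and the LLM nodes whose text may be streamed through.
stream_generate_nodes = {
    "start": WorkflowStreamGenerateNodes(end_node_id="end", stream_node_ids=["llm"]),
}
```
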
+ 43 - 0
api/core/workflow/nodes/end/end_node.py

@@ -37,6 +37,49 @@ class EndNode(BaseNode):
         )
 
     @classmethod
+    def extract_generate_nodes(cls, graph: dict, config: dict) -> list[str]:
+        """
+        Extract generate nodes
+        :param graph: graph
+        :param config: node config
+        :return:
+        """
+        node_data = cls._node_data_cls(**config.get("data", {}))
+        node_data = cast(cls._node_data_cls, node_data)
+
+        return cls.extract_generate_nodes_from_node_data(graph, node_data)
+
+    @classmethod
+    def extract_generate_nodes_from_node_data(cls, graph: dict, node_data: EndNodeData) -> list[str]:
+        """
+        Extract generate nodes from node data
+        :param graph: graph
+        :param node_data: node data object
+        :return:
+        """
+        nodes = graph.get('nodes')
+        node_mapping = {node.get('id'): node for node in nodes}
+
+        variable_selectors = node_data.outputs
+
+        generate_nodes = []
+        for variable_selector in variable_selectors:
+            if not variable_selector.value_selector:
+                continue
+
+            node_id = variable_selector.value_selector[0]
+            if node_id != 'sys' and node_id in node_mapping:
+                node = node_mapping[node_id]
+                node_type = node.get('data', {}).get('type')
+                if node_type == NodeType.LLM.value and variable_selector.value_selector[1] == 'text':
+                    generate_nodes.append(node_id)
+
+        # remove duplicates
+        generate_nodes = list(set(generate_nodes))
+
+        return generate_nodes
+
+    @classmethod
     def _extract_variable_selector_to_variable_mapping(cls, node_data: BaseNodeData) -> dict[str, list[str]]:
         """
         Extract variable selector to variable mapping

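`extract_generate_nodes` keeps only the End node outputs whose value selector points at an LLM node's `text` variable; those node ids become the `stream_node_ids` above. A hedged standalone sketch of the same filtering on a toy graph and End node config, assuming the `[node_id, variable_name]` selector layout shown in the diff.

```python
def extract_generate_node_ids(graph: dict, end_node_config: dict) -> list[str]:
    """Simplified re-statement of EndNode.extract_generate_nodes for illustration."""
    node_mapping = {node["id"]: node for node in graph["nodes"]}

    generate_nodes = []
    for output in end_node_config["data"]["outputs"]:
        value_selector = output["value_selector"]          # e.g. ["llm", "text"]
        if not value_selector:
            continue
        node_id = value_selector[0]
        node = node_mapping.get(node_id)
        if node_id != "sys" and node is not None:
            if node["data"]["type"] == "llm" and value_selector[1] == "text":
                generate_nodes.append(node_id)
    return list(set(generate_nodes))                       # de-duplicate

graph = {
    "nodes": [
        {"id": "llm", "data": {"type": "llm"}},
        {"id": "end", "data": {"type": "end"}},
    ],
    "edges": [{"source": "llm", "target": "end"}],
}
end_config = {
    "id": "end",
    "data": {"type": "end", "outputs": [{"variable": "text", "value_selector": ["llm", "text"]}]},
}

assert extract_generate_node_ids(graph, end_config) == ["llm"]
```
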
+ 11 - 15
web/app/components/app/text-generate/item/index.tsx

@@ -8,9 +8,8 @@ import { useParams } from 'next/navigation'
 import { HandThumbDownIcon, HandThumbUpIcon } from '@heroicons/react/24/outline'
 import { useBoolean } from 'ahooks'
 import { HashtagIcon } from '@heroicons/react/24/solid'
-// import PromptLog from '@/app/components/app/chat/log'
+import ResultTab from './result-tab'
 import { Markdown } from '@/app/components/base/markdown'
-import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
 import Loading from '@/app/components/base/loading'
 import Toast from '@/app/components/base/toast'
 import AudioBtn from '@/app/components/base/audio-btn'
@@ -26,7 +25,6 @@ import EditReplyModal from '@/app/components/app/annotation/edit-annotation-moda
 import { useStore as useAppStore } from '@/app/components/app/store'
 import WorkflowProcessItem from '@/app/components/base/chat/chat/answer/workflow-process'
 import type { WorkflowProcess } from '@/app/components/base/chat/types'
-import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
 
 const MAX_DEPTH = 3
 
@@ -293,23 +291,17 @@ const GenerationItem: FC<IGenerationItemProps> = ({
             <div className={`flex ${contentClassName}`}>
               <div className='grow w-0'>
                 {workflowProcessData && (
-                  <WorkflowProcessItem grayBg data={workflowProcessData} expand={workflowProcessData.expand} />
+                  <WorkflowProcessItem grayBg hideInfo data={workflowProcessData} expand={workflowProcessData.expand} />
+                )}
+                {workflowProcessData && !isError && (
+                  <ResultTab data={workflowProcessData} content={content} />
                 )}
                 {isError && (
                   <div className='text-gray-400 text-sm'>{t('share.generation.batchFailed.outputPlaceholder')}</div>
                 )}
-                {!isError && (typeof content === 'string') && (
+                {!workflowProcessData && !isError && (typeof content === 'string') && (
                   <Markdown content={content} />
                 )}
-                {!isError && (typeof content !== 'string') && (
-                  <CodeEditor
-                    readOnly
-                    title={<div/>}
-                    language={CodeLanguage.json}
-                    value={content}
-                    isJSONStringifyBeauty
-                  />
-                )}
               </div>
             </div>
 
@@ -427,7 +419,11 @@ const GenerationItem: FC<IGenerationItemProps> = ({
                   </>
                 )}
               </div>
-              <div className='text-xs text-gray-500'>{content?.length} {t('common.unit.char')}</div>
+              <div>
+                {!workflowProcessData && (
+                  <div className='text-xs text-gray-500'>{content?.length} {t('common.unit.char')}</div>
+                )}
+              </div>
             </div>
 
           </div>

+ 74 - 0
web/app/components/app/text-generate/item/result-tab.tsx

@@ -0,0 +1,74 @@
+import {
+  memo,
+  useEffect,
+  // useRef,
+  useState,
+} from 'react'
+import cn from 'classnames'
+import { useTranslation } from 'react-i18next'
+// import Loading from '@/app/components/base/loading'
+import { Markdown } from '@/app/components/base/markdown'
+import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
+import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
+import type { WorkflowProcess } from '@/app/components/base/chat/types'
+// import { WorkflowRunningStatus } from '@/app/components/workflow/types'
+
+const ResultTab = ({
+  data,
+  content,
+}: {
+  data?: WorkflowProcess
+  content: any
+}) => {
+  const { t } = useTranslation()
+  const [currentTab, setCurrentTab] = useState<string>('DETAIL')
+
+  const switchTab = async (tab: string) => {
+    setCurrentTab(tab)
+  }
+  useEffect(() => {
+    if (data?.resultText)
+      switchTab('RESULT')
+    else
+      switchTab('DETAIL')
+  }, [data?.resultText])
+
+  return (
+    <div className='grow relative flex flex-col'>
+      {data?.resultText && (
+        <div className='shrink-0 flex items-center mb-2 border-b-[0.5px] border-[rgba(0,0,0,0.05)]'>
+          <div
+            className={cn(
+              'mr-6 py-3 border-b-2 border-transparent text-[13px] font-semibold leading-[18px] text-gray-400 cursor-pointer',
+              currentTab === 'RESULT' && '!border-[rgb(21,94,239)] text-gray-700',
+            )}
+            onClick={() => switchTab('RESULT')}
+          >{t('runLog.result')}</div>
+          <div
+            className={cn(
+              'mr-6 py-3 border-b-2 border-transparent text-[13px] font-semibold leading-[18px] text-gray-400 cursor-pointer',
+              currentTab === 'DETAIL' && '!border-[rgb(21,94,239)] text-gray-700',
+            )}
+            onClick={() => switchTab('DETAIL')}
+          >{t('runLog.detail')}</div>
+        </div>
+      )}
+      <div className={cn('grow bg-white')}>
+        {currentTab === 'RESULT' && (
+          <Markdown content={data?.resultText || ''} />
+        )}
+        {currentTab === 'DETAIL' && content && (
+          <CodeEditor
+            readOnly
+            title={<div>JSON OUTPUT</div>}
+            language={CodeLanguage.json}
+            value={content}
+            isJSONStringifyBeauty
+          />
+        )}
+      </div>
+    </div>
+  )
+}
+
+export default memo(ResultTab)

+ 1 - 0
web/app/components/base/chat/types.ts

@@ -54,6 +54,7 @@ export type WorkflowProcess = {
   status: WorkflowRunningStatus
   tracing: NodeTracing[]
   expand?: boolean // for UI
+  resultText?: string
 }
 
 export type ChatItem = IChatItem & {

+ 5 - 0
web/app/components/base/icons/assets/vender/line/editor/image-indent-left.svg

File diff is too large to display

+ 39 - 0
web/app/components/base/icons/src/vender/line/editor/ImageIndentLeft.json

File diff is too large to display

+ 16 - 0
web/app/components/base/icons/src/vender/line/editor/ImageIndentLeft.tsx

@@ -0,0 +1,16 @@
+// GENERATE BY script
+// DON NOT EDIT IT MANUALLY
+
+import * as React from 'react'
+import data from './ImageIndentLeft.json'
+import IconBase from '@/app/components/base/icons/IconBase'
+import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
+
+const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
+  props,
+  ref,
+) => <IconBase {...props} ref={ref} data={data as IconData} />)
+
+Icon.displayName = 'ImageIndentLeft'
+
+export default Icon

+ 1 - 0
web/app/components/base/icons/src/vender/line/editor/index.ts

@@ -1,6 +1,7 @@
 export { default as AlignLeft } from './AlignLeft'
 export { default as BezierCurve03 } from './BezierCurve03'
 export { default as Colors } from './Colors'
+export { default as ImageIndentLeft } from './ImageIndentLeft'
 export { default as LeftIndent02 } from './LeftIndent02'
 export { default as LetterSpacing01 } from './LetterSpacing01'
 export { default as TypeSquare } from './TypeSquare'

+ 14 - 3
web/app/components/share/text-generation/result/index.tsx

@@ -201,6 +201,7 @@ const Result: FC<IResultProps> = ({
               status: WorkflowRunningStatus.Running,
               tracing: [],
               expand: false,
+              resultText: '',
             })
             setRespondingFalse()
           },
@@ -243,15 +244,25 @@ const Result: FC<IResultProps> = ({
             }))
             if (!data.outputs)
               setCompletionRes('')
-            else if (Object.keys(data.outputs).length > 1)
-              setCompletionRes(data.outputs)
             else
-              setCompletionRes(data.outputs[Object.keys(data.outputs)[0]])
+              setCompletionRes(data.outputs)
             setRespondingFalse()
             setMessageId(tempMessageId)
             onCompleted(getCompletionRes(), taskId, true)
             isEnd = true
           },
+          onTextChunk: (params) => {
+            const { data: { text } } = params
+            setWorkflowProccessData(produce(getWorkflowProccessData()!, (draft) => {
+              draft.resultText += text
+            }))
+          },
+          onTextReplace: (params) => {
+            const { data: { text } } = params
+            setWorkflowProccessData(produce(getWorkflowProccessData()!, (draft) => {
+              draft.resultText = text
+            }))
+          },
         },
         isInstalledApp,
         installedAppInfo?.id,

+ 22 - 0
web/app/components/workflow/hooks/use-workflow-run.ts

@@ -124,6 +124,7 @@ export const useWorkflowRun = () => {
         status: WorkflowRunningStatus.Running,
       },
       tracing: [],
+      resultText: '',
     })
 
     ssePost(
@@ -284,6 +285,27 @@ export const useWorkflowRun = () => {
           if (onNodeFinished)
             onNodeFinished(params)
         },
+        onTextChunk: (params) => {
+          const { data: { text } } = params
+          const {
+            workflowRunningData,
+            setWorkflowRunningData,
+          } = workflowStore.getState()
+          setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
+            draft.resultTabActive = true
+            draft.resultText += text
+          }))
+        },
+        onTextReplace: (params) => {
+          const { data: { text } } = params
+          const {
+            workflowRunningData,
+            setWorkflowRunningData,
+          } = workflowStore.getState()
+          setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
+            draft.resultText = text
+          }))
+        },
         ...restCallback,
       },
     )

+ 9 - 16
web/app/components/workflow/panel/workflow-preview.tsx

@@ -1,12 +1,12 @@
 import {
   memo,
   useEffect,
-  useRef,
+  // useRef,
   useState,
 } from 'react'
 import cn from 'classnames'
 import { useTranslation } from 'react-i18next'
-import OutputPanel from '../run/output-panel'
+import ResultText from '../run/result-text'
 import ResultPanel from '../run/result-panel'
 import TracingPanel from '../run/tracing-panel'
 import {
@@ -32,22 +32,15 @@ const WorkflowPreview = () => {
     setCurrentTab(tab)
   }
 
-  const [height, setHieght] = useState(0)
-  const ref = useRef<HTMLDivElement>(null)
-
   useEffect(() => {
     if (showDebugAndPreviewPanel && showInputsPanel)
       setCurrentTab('INPUT')
   }, [showDebugAndPreviewPanel, showInputsPanel])
 
-  const adjustResultHeight = () => {
-    if (ref.current)
-      setHieght(ref.current?.clientHeight - 16 - 16 - 2 - 1)
-  }
-
   useEffect(() => {
-    adjustResultHeight()
-  }, [])
+    if ((workflowRunningData?.result.status === WorkflowRunningStatus.Succeeded || workflowRunningData?.result.status === WorkflowRunningStatus.Failed) && !workflowRunningData.resultText)
+      switchTab('DETAIL')
+  }, [workflowRunningData])
 
   return (
     <div className={`
@@ -107,7 +100,7 @@ const WorkflowPreview = () => {
             }}
           >{t('runLog.tracing')}</div>
         </div>
-        <div ref={ref} className={cn(
+        <div className={cn(
           'grow bg-white h-0 overflow-y-auto rounded-b-2xl',
           (currentTab === 'RESULT' || currentTab === 'TRACING') && '!bg-gray-50',
         )}>
@@ -115,11 +108,11 @@ const WorkflowPreview = () => {
             <InputsPanel onRun={() => switchTab('RESULT')} />
           )}
           {currentTab === 'RESULT' && (
-            <OutputPanel
+            <ResultText
               isRunning={workflowRunningData?.result?.status === WorkflowRunningStatus.Running || !workflowRunningData?.result}
-              outputs={workflowRunningData?.result?.outputs}
+              outputs={workflowRunningData?.resultText}
               error={workflowRunningData?.result?.error}
-              height={height}
+              onClick={() => switchTab('DETAIL')}
             />
           )}
           {currentTab === 'DETAIL' && (

+ 56 - 0
web/app/components/workflow/run/result-text.tsx

@@ -0,0 +1,56 @@
+'use client'
+import type { FC } from 'react'
+import { useTranslation } from 'react-i18next'
+import { ImageIndentLeft } from '@/app/components/base/icons/src/vender/line/editor'
+import { Markdown } from '@/app/components/base/markdown'
+import LoadingAnim from '@/app/components/app/chat/loading-anim'
+
+type ResultTextProps = {
+  isRunning?: boolean
+  outputs?: any
+  error?: string
+  onClick?: () => void
+}
+
+const ResultText: FC<ResultTextProps> = ({
+  isRunning,
+  outputs,
+  error,
+  onClick,
+}) => {
+  const { t } = useTranslation()
+  return (
+    <div className='bg-gray-50 py-2'>
+      {isRunning && !outputs && (
+        <div className='pt-4 pl-[26px]'>
+          <LoadingAnim type='text' />
+        </div>
+      )}
+      {!isRunning && error && (
+        <div className='px-4'>
+          <div className='px-3 py-[10px] rounded-lg !bg-[#fef3f2] border-[0.5px] border-[rbga(0,0,0,0.05)] shadow-xs'>
+            <div className='text-xs leading-[18px] text-[#d92d20]'>{error}</div>
+          </div>
+        </div>
+      )}
+      {!isRunning && !outputs && !error && (
+        <div className='mt-[120px] px-4 py-2 flex flex-col items-center text-[13px] leading-[18px] text-gray-500'>
+          <ImageIndentLeft className='w-6 h-6 text-gray-400' />
+          <div className='mr-2'>{t('runLog.resultEmpty.title')}</div>
+          <div>
+            {t('runLog.resultEmpty.tipLeft')}
+            <span onClick={onClick} className='cursor-pointer text-primary-600'>{t('runLog.resultEmpty.link')}</span>
+            {t('runLog.resultEmpty.tipRight')}
+          </div>
+        </div>
+      )}
+      {outputs && (
+        <div className='px-4 py-2'>
+          <Markdown content={outputs} />
+        </div>
+      )}
+    </div>
+  )
+}
+
+export default ResultText

+ 7 - 2
web/app/components/workflow/store.ts

@@ -19,11 +19,16 @@ import type {
 } from './types'
 import { WorkflowContext } from './context'
 
+type PreviewRunningData = WorkflowRunningData & {
+  resultTabActive?: boolean
+  resultText?: string
+}
+
 type Shape = {
   appId: string
   panelWidth: number
-  workflowRunningData?: WorkflowRunningData
-  setWorkflowRunningData: (workflowData?: WorkflowRunningData) => void
+  workflowRunningData?: PreviewRunningData
+  setWorkflowRunningData: (workflowData: PreviewRunningData) => void
   historyWorkflowData?: HistoryWorkflowData
   setHistoryWorkflowData: (historyWorkflowData?: HistoryWorkflowData) => void
   showRunHistory: boolean

+ 6 - 0
web/i18n/de-DE/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Gesamtzeichen',
     steps: 'Ausführungsschritte',
   },
+  resultEmpty: {
+    title: 'Dieser Lauf gibt nur das JSON-Format aus',
+    tipLeft: 'Bitte gehen Sie zum ',
+    Link: 'Detailpanel',
+    tipRight: 'ansehen.',
+  },
 }
 
 export default translation

+ 6 - 0
web/i18n/en-US/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Total Tokens',
     steps: 'Run Steps',
   },
+  resultEmpty: {
+    title: 'This run only output JSON format,',
+    tipLeft: 'please go to the ',
+    link: 'detail panel',
+    tipRight: ' view it.',
+  },
 }
 
 export default translation

+ 6 - 0
web/i18n/fr-FR/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Total des jetons',
     steps: 'Étapes d\'exécution',
   },
+  resultEmpty: {
+    title: 'Cela exécute uniquement le format de sortie JSON,',
+    tipLeft: 'veuillez aller à ',
+    link: 'panneau de détail',
+    tipRight: ' visualisez-le.',
+  },
 }
 
 export default translation

+ 6 - 0
web/i18n/ja-JP/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'トークンの合計',
     steps: '実行ステップ',
   },
+  resultEmpty: {
+    title: 'この実行では JSON 形式のみが出力されます',
+    tipLeft: 'にアクセスしてください',
+    link: '詳細パネル',
+    tipRight: '表示します。',
+  },
 }
 
 export default translation

+ 6 - 0
web/i18n/pt-BR/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Total de Tokens',
     steps: 'Passos de Execução',
   },
+  resultEmpty: {
+    title: 'Esta execução apenas produz o formato JSON,',
+    tipLeft: 'por favor vá para ',
+    link: 'painel de detalhes',
+    tipRight: ' veja.',
+  },
 }
 
 export default translation

+ 6 - 0
web/i18n/uk-UA/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Загальна кількість токенів',
     steps: 'Кроки виконання',
   },
+  resultEmpty: {
+    title: 'Цей запуск лише вихідного формату JSON,',
+    tipLeft: 'будь ласка, перейдіть до ',
+    link: 'панель деталей',
+    tipRight: ' переглянути.',
+  },
 }
 
 export default translation

+ 6 - 0
web/i18n/vi-VN/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Tổng số token',
     steps: 'Các bước chạy',
   },
+  resultEmpty: {
+    title: 'Chạy này chỉ xuất ra định dạng JSON,',
+    tipLeft: 'vui lòng truy cập ',
+    link: 'bảng chi tiết',
+    tipRight: ' xem nó.',
+  },
 }
 
 export default translation

+ 6 - 0
web/i18n/zh-Hans/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: '总 token 数',
     steps: '运行步数',
   },
+  resultEmpty: {
+    title: '本次运行仅输出JSON格式,',
+    tipLeft: '请转到',
+    link: '详细信息面板',
+    tipRight: '查看它。',
+  },
 }
 
 export default translation

+ 6 - 0
web/i18n/zh-Hant/run-log.ts

@@ -18,6 +18,12 @@ const translation = {
     tokens: '總 token 數',
     steps: '執行步數',
   },
+  resultEmpty: {
+    title: '本運行僅輸出JSON格式,',
+    tipLeft: '請到',
+    link: '詳細資訊面板',
+    tipRight: '查看它。',
+  },
 }
 
 export default translation

+ 6 - 2
web/service/share.ts

@@ -1,4 +1,4 @@
-import type { IOnCompleted, IOnData, IOnError, IOnFile, IOnMessageEnd, IOnMessageReplace, IOnNodeFinished, IOnNodeStarted, IOnThought, IOnWorkflowFinished, IOnWorkflowStarted } from './base'
+import type { IOnCompleted, IOnData, IOnError, IOnFile, IOnMessageEnd, IOnMessageReplace, IOnNodeFinished, IOnNodeStarted, IOnTextChunk, IOnTextReplace, IOnThought, IOnWorkflowFinished, IOnWorkflowStarted } from './base'
 import {
   del as consoleDel, get as consoleGet, patch as consolePatch, post as consolePost,
   delPublic as del, getPublic as get, patchPublic as patch, postPublic as post, ssePost,
@@ -72,11 +72,15 @@ export const sendWorkflowMessage = async (
     onNodeStarted,
     onNodeFinished,
     onWorkflowFinished,
+    onTextChunk,
+    onTextReplace,
   }: {
     onWorkflowStarted: IOnWorkflowStarted
     onNodeStarted: IOnNodeStarted
     onNodeFinished: IOnNodeFinished
     onWorkflowFinished: IOnWorkflowFinished
+    onTextChunk: IOnTextChunk
+    onTextReplace: IOnTextReplace
   },
   isInstalledApp: boolean,
   installedAppId = '',
@@ -86,7 +90,7 @@ export const sendWorkflowMessage = async (
       ...body,
       response_mode: 'streaming',
     },
-  }, { onNodeStarted, onWorkflowStarted, onWorkflowFinished, isPublicAPI: !isInstalledApp, onNodeFinished })
+  }, { onNodeStarted, onWorkflowStarted, onWorkflowFinished, isPublicAPI: !isInstalledApp, onNodeFinished, onTextChunk, onTextReplace })
 }
 
 export const fetchAppInfo = async () => {