From 98e75ad7d6acfd9473ae0631d45c88a25733738c Mon Sep 17 00:00:00 2001
From: Henry Heng
Date: Wed, 14 May 2025 00:07:54 +0800
Subject: [PATCH] Bugfix/ui streaming when model streaming is off (#4424)

fix ui streaming when model streaming is off
---
 .../components/nodes/agentflow/Agent/Agent.ts  | 18 +++++++++++++++---
 packages/components/nodes/agentflow/LLM/LLM.ts |  6 +++++-
 2 files changed, 20 insertions(+), 4 deletions(-)

diff --git a/packages/components/nodes/agentflow/Agent/Agent.ts b/packages/components/nodes/agentflow/Agent/Agent.ts
index 3eeda6d28..8cfdd6a46 100644
--- a/packages/components/nodes/agentflow/Agent/Agent.ts
+++ b/packages/components/nodes/agentflow/Agent/Agent.ts
@@ -903,7 +903,11 @@ class Agent_Agentflow implements INode {
             }
         } else if (!humanInput && !isStreamable && isLastNode && sseStreamer) {
             // Stream whole response back to UI if not streaming and no tool calls
-            sseStreamer.streamTokenEvent(chatId, JSON.stringify(response, null, 2))
+            let responseContent = JSON.stringify(response, null, 2)
+            if (typeof response.content === 'string') {
+                responseContent = response.content
+            }
+            sseStreamer.streamTokenEvent(chatId, responseContent)
         }
 
         // Calculate execution time
@@ -1473,7 +1477,11 @@ class Agent_Agentflow implements INode {
 
         // Stream non-streaming response if this is the last node
         if (isLastNode && sseStreamer) {
-            sseStreamer.streamTokenEvent(chatId, JSON.stringify(newResponse, null, 2))
+            let responseContent = JSON.stringify(newResponse, null, 2)
+            if (typeof newResponse.content === 'string') {
+                responseContent = newResponse.content
+            }
+            sseStreamer.streamTokenEvent(chatId, responseContent)
         }
     }
 
@@ -1715,7 +1723,11 @@ class Agent_Agentflow implements INode {
 
         // Stream non-streaming response if this is the last node
         if (isLastNode && sseStreamer) {
-            sseStreamer.streamTokenEvent(chatId, JSON.stringify(newResponse, null, 2))
+            let responseContent = JSON.stringify(newResponse, null, 2)
+            if (typeof newResponse.content === 'string') {
+                responseContent = newResponse.content
+            }
+            sseStreamer.streamTokenEvent(chatId, responseContent)
         }
     }
 
diff --git a/packages/components/nodes/agentflow/LLM/LLM.ts b/packages/components/nodes/agentflow/LLM/LLM.ts
index 31d26c92d..18f8d187d 100644
--- a/packages/components/nodes/agentflow/LLM/LLM.ts
+++ b/packages/components/nodes/agentflow/LLM/LLM.ts
@@ -460,7 +460,11 @@ class LLM_Agentflow implements INode {
         // Stream whole response back to UI if this is the last node
         if (isLastNode && options.sseStreamer) {
            const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
-            sseStreamer.streamTokenEvent(chatId, JSON.stringify(response, null, 2))
+            let responseContent = JSON.stringify(response, null, 2)
+            if (typeof response.content === 'string') {
+                responseContent = response.content
+            }
+            sseStreamer.streamTokenEvent(chatId, responseContent)
         }
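
Note: the four hunks above repeat the same content-extraction logic at each call site. Below is a minimal standalone sketch of that pattern for reference. The helper name `toUiTokenPayload` and the `LLMResponse` shape are illustrative assumptions and not part of the patch; `sseStreamer.streamTokenEvent(chatId, token)` is the streamer call the patch already uses.

    // Hypothetical helper illustrating the fix: when model streaming is off, prefer the
    // plain string content of the response for the UI token event, and only fall back to
    // pretty-printed JSON when the content is not a simple string (e.g. structured payloads).
    interface LLMResponse {
        content: unknown
        [key: string]: unknown
    }

    const toUiTokenPayload = (response: LLMResponse): string => {
        if (typeof response.content === 'string') {
            // Stream the text as-is so the UI shows readable output instead of a JSON blob
            return response.content
        }
        // Previous behaviour, kept as the fallback for non-string content
        return JSON.stringify(response, null, 2)
    }

    // Usage, mirroring the call sites in Agent.ts and LLM.ts:
    // sseStreamer.streamTokenEvent(chatId, toUiTokenPayload(response))

The patch keeps the JSON.stringify fallback and only switches to response.content when it is a string, presumably so the last-node UI stream shows readable text rather than an escaped JSON dump whenever the model's own streaming is disabled.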