Bugfix/ui streaming when model streaming is off (#4424)

Fix UI streaming when model streaming is off
This commit is contained in:
Henry Heng 2025-05-14 00:07:54 +08:00 committed by GitHub
parent a8f990c242
commit 98e75ad7d6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 20 additions and 4 deletions

View File

@@ -903,7 +903,11 @@ class Agent_Agentflow implements INode {
                         }
                     } else if (!humanInput && !isStreamable && isLastNode && sseStreamer) {
                         // Stream whole response back to UI if not streaming and no tool calls
-                        sseStreamer.streamTokenEvent(chatId, JSON.stringify(response, null, 2))
+                        let responseContent = JSON.stringify(response, null, 2)
+                        if (typeof response.content === 'string') {
+                            responseContent = response.content
+                        }
+                        sseStreamer.streamTokenEvent(chatId, responseContent)
                     }

                     // Calculate execution time
@@ -1473,7 +1477,11 @@ class Agent_Agentflow implements INode {
                 // Stream non-streaming response if this is the last node
                 if (isLastNode && sseStreamer) {
-                    sseStreamer.streamTokenEvent(chatId, JSON.stringify(newResponse, null, 2))
+                    let responseContent = JSON.stringify(newResponse, null, 2)
+                    if (typeof newResponse.content === 'string') {
+                        responseContent = newResponse.content
+                    }
+                    sseStreamer.streamTokenEvent(chatId, responseContent)
                 }
             }
@@ -1715,7 +1723,11 @@ class Agent_Agentflow implements INode {
                 // Stream non-streaming response if this is the last node
                 if (isLastNode && sseStreamer) {
-                    sseStreamer.streamTokenEvent(chatId, JSON.stringify(newResponse, null, 2))
+                    let responseContent = JSON.stringify(newResponse, null, 2)
+                    if (typeof newResponse.content === 'string') {
+                        responseContent = newResponse.content
+                    }
+                    sseStreamer.streamTokenEvent(chatId, responseContent)
                 }
             }

View File

@@ -460,7 +460,11 @@ class LLM_Agentflow implements INode {
             // Stream whole response back to UI if this is the last node
             if (isLastNode && options.sseStreamer) {
                 const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
-                sseStreamer.streamTokenEvent(chatId, JSON.stringify(response, null, 2))
+                let responseContent = JSON.stringify(response, null, 2)
+                if (typeof response.content === 'string') {
+                    responseContent = response.content
+                }
+                sseStreamer.streamTokenEvent(chatId, responseContent)
             }
         }