fix agent and llm nodes when chat model streaming is off

Henry 2025-08-11 19:09:09 +08:00
parent 32cd06cd28
commit ed4cb2d35a
2 changed files with 16 additions and 8 deletions

```diff
@@ -974,11 +974,15 @@ class Agent_Agentflow implements INode {
                 }
             } else if (!humanInput && !isStreamable && isLastNode && sseStreamer) {
                 // Stream whole response back to UI if not streaming and no tool calls
-                let responseContent = JSON.stringify(response, null, 2)
-                if (typeof response.content === 'string') {
-                    responseContent = response.content
+                let finalResponse = ''
+                if (response.content && Array.isArray(response.content)) {
+                    finalResponse = response.content.map((item: any) => item.text).join('\n')
+                } else if (response.content && typeof response.content === 'string') {
+                    finalResponse = response.content
+                } else {
+                    finalResponse = JSON.stringify(response, null, 2)
                 }
-                sseStreamer.streamTokenEvent(chatId, responseContent)
+                sseStreamer.streamTokenEvent(chatId, finalResponse)
             }
             // Calculate execution time
```

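Context for the change: with streaming disabled, some chat models return `response.content` as an array of content blocks rather than a plain string, so the old `typeof response.content === 'string'` check fell through and the UI received a raw JSON dump of the whole response object. Below is a minimal sketch of the normalization both hunks introduce; the `ContentBlock` shape and its `text` field are assumptions inferred from `item.text` in the diff, not Flowise's actual types.

```typescript
// Sketch of the normalization logic shared by the Agent and LLM nodes.
// ContentBlock is a hypothetical shape inferred from `item.text` in the diff.
interface ContentBlock {
    type?: string
    text: string
}

interface ChatModelResponse {
    content?: string | ContentBlock[]
    [key: string]: unknown
}

function normalizeResponseContent(response: ChatModelResponse): string {
    if (response.content && Array.isArray(response.content)) {
        // Multi-part response: concatenate the text of every block
        return response.content.map((item) => item.text).join('\n')
    } else if (response.content && typeof response.content === 'string') {
        // Plain string content streams through unchanged
        return response.content
    }
    // Fallback: serialize the whole response so the UI still shows something
    return JSON.stringify(response, null, 2)
}
```

The LLM node gets the same fix in the second hunk below.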

```diff
@@ -474,11 +474,15 @@ class LLM_Agentflow implements INode {
             // Stream whole response back to UI if this is the last node
             if (isLastNode && options.sseStreamer) {
                 const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
-                let responseContent = JSON.stringify(response, null, 2)
-                if (typeof response.content === 'string') {
-                    responseContent = response.content
+                let finalResponse = ''
+                if (response.content && Array.isArray(response.content)) {
+                    finalResponse = response.content.map((item: any) => item.text).join('\n')
+                } else if (response.content && typeof response.content === 'string') {
+                    finalResponse = response.content
+                } else {
+                    finalResponse = JSON.stringify(response, null, 2)
                 }
-                sseStreamer.streamTokenEvent(chatId, responseContent)
+                sseStreamer.streamTokenEvent(chatId, finalResponse)
             }
         }
```
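
Under the same assumptions, the three branches of the sketch above map sample (hypothetical) responses like this, with the result then passed as a single whole-response token to `sseStreamer.streamTokenEvent(chatId, finalResponse)`:

```typescript
// Hypothetical inputs exercising each branch of normalizeResponseContent.
const blockContent = { content: [{ type: 'text', text: 'Hello' }, { type: 'text', text: 'world' }] }
console.log(normalizeResponseContent(blockContent)) // "Hello\nworld"

const stringContent = { content: 'Hello world' }
console.log(normalizeResponseContent(stringContent)) // "Hello world"

const noContent = { id: 'msg_123' }
console.log(normalizeResponseContent(noContent)) // full JSON dump of the object
```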