Feature/Code Interpreter (#3183)

* Base changes for Server-Sent Events (SSE), replacing socket.io

* lint fixes

* add interface and separate methods for streaming events

* lint

* first draft: handles both internal and external prediction endpoints

* lint fixes

* additional internal endpoint for streaming and associated changes

* return streamResponse as true to build agent flow

* 1) JSON formatting for internal events
2) other fixes

* 1) convert internal event to metadata to maintain consistency with external response

* fix action and metadata streaming

* fix for error when agent flow is aborted

* prevent subflows from streaming and other code cleanup

* prevent streaming from enclosed tools

* add fix to prevent ChainTool streaming

* update lock file

* add openWhenHidden option to SSE

* Streaming errors

* Streaming errors

* add fix for showing error message

* add code interpreter

* add artifacts to view message dialog

* Update pnpm-lock.yaml

---------

Co-authored-by: Vinod Paidimarry <vinodkiran@outlook.in>
Henry Heng 2024-09-17 08:44:56 +01:00 committed by GitHub
parent 26444ac3ae
commit b02f279e9d
21 changed files with 729 additions and 333 deletions
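The core convention this PR introduces: a tool keeps returning a plain string, and when it produced artifacts (images, HTML, markdown, JSON, ...) it appends a marker followed by a JSON array; the agent executor and tool nodes split the marker back off, attach the array to additional_kwargs, and the server forwards it over a dedicated 'artifacts' SSE event. A minimal TypeScript sketch of that convention, using the names visible in the diffs below (the two helpers are illustrative, not part of the commit):

const ARTIFACTS_PREFIX = '\n\n----FLOWISE_ARTIFACTS----\n\n'

// e.g. { type: 'png', data: 'FILE-STORAGE::artifact_123.png' } or { type: 'html', data: '<div>...</div>' }
type Artifact = { type: string; data: string }

// Tool side: append artifacts to the textual output only when there are any
const withArtifacts = (text: string, artifacts: Artifact[]): string =>
    artifacts.length ? text + ARTIFACTS_PREFIX + JSON.stringify(artifacts) : text

// Consumer side (agent executor / tool node): split the marker back off
const splitArtifacts = (output: string): { text: string; artifacts: Artifact[] } => {
    if (!output.includes(ARTIFACTS_PREFIX)) return { text: output, artifacts: [] }
    const [text, raw] = output.split(ARTIFACTS_PREFIX)
    try {
        return { text, artifacts: JSON.parse(raw) }
    } catch {
        return { text, artifacts: [] } // malformed JSON: keep the text, drop the artifacts
    }
}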


@ -208,6 +208,7 @@ class OpenAIAssistant_Agents implements INode {
const usedTools: IUsedTool[] = []
const fileAnnotations = []
const artifacts = []
const assistant = await appDataSource.getRepository(databaseEntities['Assistant']).findOneBy({
id: selectedAssistantId
@ -439,21 +440,23 @@ class OpenAIAssistant_Agents implements INode {
const fileId = chunk.image_file.file_id
const fileObj = await openai.files.retrieve(fileId)
const buffer = await downloadImg(openai, fileId, `${fileObj.filename}.png`, options.chatflowid, options.chatId)
const base64String = Buffer.from(buffer).toString('base64')
// TODO: Use a file path and retrieve image on the fly. Storing as base64 to localStorage and database will easily hit limits
const imgHTML = `<img src="data:image/png;base64,${base64String}" width="100%" height="max-content" alt="${fileObj.filename}" /><br/>`
text += imgHTML
const filePath = await downloadImg(
openai,
fileId,
`${fileObj.filename}.png`,
options.chatflowid,
options.chatId
)
artifacts.push({ type: 'png', data: filePath })
if (!isStreamingStarted) {
isStreamingStarted = true
if (sseStreamer) {
sseStreamer.streamStartEvent(chatId, imgHTML)
sseStreamer.streamStartEvent(chatId, ' ')
}
}
if (sseStreamer) {
sseStreamer.streamTokenEvent(chatId, imgHTML)
sseStreamer.streamArtifactsEvent(chatId, artifacts)
}
}
}
@ -565,6 +568,7 @@ class OpenAIAssistant_Agents implements INode {
return {
text,
usedTools,
artifacts,
fileAnnotations,
assistant: { assistantId: openAIAssistantId, threadId, runId: runThreadId, messages: messageData }
}
@ -769,12 +773,8 @@ class OpenAIAssistant_Agents implements INode {
const fileId = content.image_file.file_id
const fileObj = await openai.files.retrieve(fileId)
const buffer = await downloadImg(openai, fileId, `${fileObj.filename}.png`, options.chatflowid, options.chatId)
const base64String = Buffer.from(buffer).toString('base64')
// TODO: Use a file path and retrieve image on the fly. Storing as base64 to localStorage and database will easily hit limits
const imgHTML = `<img src="data:image/png;base64,${base64String}" width="100%" height="max-content" alt="${fileObj.filename}" /><br/>`
returnVal += imgHTML
const filePath = await downloadImg(openai, fileId, `${fileObj.filename}.png`, options.chatflowid, options.chatId)
artifacts.push({ type: 'png', data: filePath })
}
}
@ -787,6 +787,7 @@ class OpenAIAssistant_Agents implements INode {
return {
text: returnVal,
usedTools,
artifacts,
fileAnnotations,
assistant: { assistantId: openAIAssistantId, threadId, runId: runThreadId, messages: messageData }
}
@ -807,9 +808,9 @@ const downloadImg = async (openai: OpenAI, fileId: string, fileName: string, ...
const image_data_buffer = Buffer.from(image_data)
const mime = 'image/png'
await addSingleFileToStorage(mime, image_data_buffer, fileName, ...paths)
const res = await addSingleFileToStorage(mime, image_data_buffer, fileName, ...paths)
return image_data_buffer
return res
}
const downloadFile = async (openAIApiKey: string, fileObj: any, fileName: string, ...paths: string[]) => {


@ -134,6 +134,7 @@ class ToolAgent_Agents implements INode {
let res: ChainValues = {}
let sourceDocuments: ICommonObject[] = []
let usedTools: IUsedTool[] = []
let artifacts = []
if (shouldStreamResponse) {
const handler = new CustomChainHandler(sseStreamer, chatId)
@ -150,6 +151,12 @@ class ToolAgent_Agents implements INode {
}
usedTools = res.usedTools
}
if (res.artifacts) {
if (sseStreamer) {
sseStreamer.streamArtifactsEvent(chatId, flatten(res.artifacts))
}
artifacts = res.artifacts
}
// If the tool is set to returnDirect, stream the output to the client
if (res.usedTools && res.usedTools.length) {
let inputTools = nodeData.inputs?.tools
@ -169,6 +176,9 @@ class ToolAgent_Agents implements INode {
if (res.usedTools) {
usedTools = res.usedTools
}
if (res.artifacts) {
artifacts = res.artifacts
}
}
let output = res?.output
@ -203,7 +213,7 @@ class ToolAgent_Agents implements INode {
let finalRes = output
if (sourceDocuments.length || usedTools.length) {
if (sourceDocuments.length || usedTools.length || artifacts.length) {
const finalRes: ICommonObject = { text: output }
if (sourceDocuments.length) {
finalRes.sourceDocuments = flatten(sourceDocuments)
@ -211,6 +221,9 @@ class ToolAgent_Agents implements INode {
if (usedTools.length) {
finalRes.usedTools = usedTools
}
if (artifacts.length) {
finalRes.artifacts = artifacts
}
return finalRes
}


@ -21,7 +21,7 @@ import {
IDocument,
IStateWithMessages
} from '../../../src/Interface'
import { ToolCallingAgentOutputParser, AgentExecutor, SOURCE_DOCUMENTS_PREFIX } from '../../../src/agents'
import { ToolCallingAgentOutputParser, AgentExecutor, SOURCE_DOCUMENTS_PREFIX, ARTIFACTS_PREFIX } from '../../../src/agents'
import { getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils'
import {
customGet,
@ -35,7 +35,6 @@ import {
} from '../commonUtils'
import { END, StateGraph } from '@langchain/langgraph'
import { StructuredTool } from '@langchain/core/tools'
import { DynamicStructuredTool } from '../../tools/CustomTool/core'
const defaultApprovalPrompt = `You are about to execute tool: {tools}. Ask if user want to proceed`
const examplePrompt = 'You are a research assistant who can search for up-to-date info using search engine.'
@ -739,7 +738,12 @@ async function agentNode(
// If the last message is a tool message and is an interrupted message, format output into standard agent output
if (lastMessage._getType() === 'tool' && lastMessage.additional_kwargs?.nodeId === nodeData.id) {
let formattedAgentResult: { output?: string; usedTools?: IUsedTool[]; sourceDocuments?: IDocument[] } = {}
let formattedAgentResult: {
output?: string
usedTools?: IUsedTool[]
sourceDocuments?: IDocument[]
artifacts?: ICommonObject[]
} = {}
formattedAgentResult.output = result.content
if (lastMessage.additional_kwargs?.usedTools) {
formattedAgentResult.usedTools = lastMessage.additional_kwargs.usedTools as IUsedTool[]
@ -747,6 +751,9 @@ async function agentNode(
if (lastMessage.additional_kwargs?.sourceDocuments) {
formattedAgentResult.sourceDocuments = lastMessage.additional_kwargs.sourceDocuments as IDocument[]
}
if (lastMessage.additional_kwargs?.artifacts) {
formattedAgentResult.artifacts = lastMessage.additional_kwargs.artifacts as ICommonObject[]
}
result = formattedAgentResult
} else {
result.name = name
@ -765,12 +772,18 @@ async function agentNode(
if (result.sourceDocuments) {
additional_kwargs.sourceDocuments = result.sourceDocuments
}
if (result.artifacts) {
additional_kwargs.artifacts = result.artifacts
}
if (result.output) {
result.content = result.output
delete result.output
}
const outputContent = typeof result === 'string' ? result : result.content || result.output
let outputContent = typeof result === 'string' ? result : result.content || result.output
// remove invalid markdown image pattern: ![<some-string>](<some-string>)
outputContent = typeof outputContent === 'string' ? outputContent.replace(/!\[.*?\]\(.*?\)/g, '') : outputContent
if (nodeData.inputs?.updateStateMemoryUI || nodeData.inputs?.updateStateMemoryCode) {
let formattedOutput = {
@ -931,6 +944,9 @@ class ToolNode<T extends BaseMessage[] | MessagesState> extends RunnableCallable
// Extract all properties except messages for IStateWithMessages
const { messages: _, ...inputWithoutMessages } = Array.isArray(input) ? { messages: input } : input
const ChannelsWithoutMessages = {
chatId: this.options.chatId,
sessionId: this.options.sessionId,
input: this.inputQuery,
state: inputWithoutMessages
}
@ -940,12 +956,14 @@ class ToolNode<T extends BaseMessage[] | MessagesState> extends RunnableCallable
if (tool === undefined) {
throw new Error(`Tool ${call.name} not found.`)
}
if (tool && tool instanceof DynamicStructuredTool) {
if (tool && (tool as any).setFlowObject) {
// @ts-ignore
tool.setFlowObject(ChannelsWithoutMessages)
}
let output = await tool.invoke(call.args, config)
let sourceDocuments: Document[] = []
let artifacts = []
if (output?.includes(SOURCE_DOCUMENTS_PREFIX)) {
const outputArray = output.split(SOURCE_DOCUMENTS_PREFIX)
output = outputArray[0]
@ -956,12 +974,23 @@ class ToolNode<T extends BaseMessage[] | MessagesState> extends RunnableCallable
console.error('Error parsing source documents from tool')
}
}
if (output?.includes(ARTIFACTS_PREFIX)) {
const outputArray = output.split(ARTIFACTS_PREFIX)
output = outputArray[0]
try {
artifacts = JSON.parse(outputArray[1])
} catch (e) {
console.error('Error parsing artifacts from tool')
}
}
return new ToolMessage({
name: tool.name,
content: typeof output === 'string' ? output : JSON.stringify(output),
tool_call_id: call.id!,
additional_kwargs: {
sourceDocuments,
artifacts,
args: call.args,
usedTools: [
{


@ -12,13 +12,12 @@ import {
import { AIMessage, AIMessageChunk, BaseMessage, ToolMessage } from '@langchain/core/messages'
import { StructuredTool } from '@langchain/core/tools'
import { RunnableConfig } from '@langchain/core/runnables'
import { SOURCE_DOCUMENTS_PREFIX } from '../../../src/agents'
import { ARTIFACTS_PREFIX, SOURCE_DOCUMENTS_PREFIX } from '../../../src/agents'
import { Document } from '@langchain/core/documents'
import { DataSource } from 'typeorm'
import { MessagesState, RunnableCallable, customGet, getVM } from '../commonUtils'
import { getVars, prepareSandboxVars } from '../../../src/utils'
import { ChatPromptTemplate } from '@langchain/core/prompts'
import { DynamicStructuredTool } from '../../tools/CustomTool/core'
const defaultApprovalPrompt = `You are about to execute tool: {tools}. Ask if user want to proceed`
@ -408,6 +407,9 @@ class ToolNode<T extends IStateWithMessages | BaseMessage[] | MessagesState> ext
// Extract all properties except messages for IStateWithMessages
const { messages: _, ...inputWithoutMessages } = Array.isArray(input) ? { messages: input } : input
const ChannelsWithoutMessages = {
chatId: this.options.chatId,
sessionId: this.options.sessionId,
input: this.inputQuery,
state: inputWithoutMessages
}
@ -417,12 +419,13 @@ class ToolNode<T extends IStateWithMessages | BaseMessage[] | MessagesState> ext
if (tool === undefined) {
throw new Error(`Tool ${call.name} not found.`)
}
if (tool && tool instanceof DynamicStructuredTool) {
if (tool && (tool as any).setFlowObject) {
// @ts-ignore
tool.setFlowObject(ChannelsWithoutMessages)
}
let output = await tool.invoke(call.args, config)
let sourceDocuments: Document[] = []
let artifacts = []
if (output?.includes(SOURCE_DOCUMENTS_PREFIX)) {
const outputArray = output.split(SOURCE_DOCUMENTS_PREFIX)
output = outputArray[0]
@ -433,12 +436,23 @@ class ToolNode<T extends IStateWithMessages | BaseMessage[] | MessagesState> ext
console.error('Error parsing source documents from tool')
}
}
if (output?.includes(ARTIFACTS_PREFIX)) {
const outputArray = output.split(ARTIFACTS_PREFIX)
output = outputArray[0]
try {
artifacts = JSON.parse(outputArray[1])
} catch (e) {
console.error('Error parsing artifacts from tool')
}
}
return new ToolMessage({
name: tool.name,
content: typeof output === 'string' ? output : JSON.stringify(output),
tool_call_id: call.id!,
additional_kwargs: {
sourceDocuments,
artifacts,
args: call.args,
usedTools: [
{
@ -489,7 +503,8 @@ const getReturnOutput = async (
tool: output.name,
toolInput: output.additional_kwargs.args,
toolOutput: output.content,
sourceDocuments: output.additional_kwargs.sourceDocuments
sourceDocuments: output.additional_kwargs.sourceDocuments,
artifacts: output.additional_kwargs.artifacts
} as IUsedTool
})


@ -0,0 +1,269 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { StructuredTool, ToolInputParsingException, ToolParams } from '@langchain/core/tools'
import { CodeInterpreter } from '@e2b/code-interpreter'
import { z } from 'zod'
import { addSingleFileToStorage } from '../../../src/storageUtils'
import { CallbackManager, CallbackManagerForToolRun, Callbacks, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
import { RunnableConfig } from '@langchain/core/runnables'
import { ARTIFACTS_PREFIX } from '../../../src/agents'
const DESC = `Evaluates python code in a sandbox environment. \
The environment is long running and exists across multiple executions. \
You must send the whole script every time and print your outputs. \
Script should be pure python code that can be evaluated. \
It should be in python format NOT markdown. \
The code should NOT be wrapped in backticks. \
All python packages including requests, matplotlib, scipy, numpy, pandas, \
etc are available. Create and display chart using "plt.show()".`
const NAME = 'code_interpreter'
class Code_Interpreter_Tools implements INode {
label: string
name: string
version: number
description: string
type: string
icon: string
category: string
baseClasses: string[]
inputs: INodeParams[]
badge: string
credential: INodeParams
constructor() {
this.label = 'Code Interpreter by E2B'
this.name = 'codeInterpreterE2B'
this.version = 1.0
this.type = 'CodeInterpreter'
this.icon = 'e2b.png'
this.category = 'Tools'
this.description = 'Execute code in a sandbox environment'
this.baseClasses = [this.type, 'Tool', ...getBaseClasses(E2BTool)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['E2BApi'],
optional: true
}
this.inputs = [
{
label: 'Tool Name',
name: 'toolName',
type: 'string',
description: 'Specify the name of the tool',
default: 'code_interpreter'
},
{
label: 'Tool Description',
name: 'toolDesc',
type: 'string',
rows: 4,
description: 'Specify the description of the tool',
default: DESC
}
]
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const toolDesc = nodeData.inputs?.toolDesc as string
const toolName = nodeData.inputs?.toolName as string
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const e2bApiKey = getCredentialParam('e2bApiKey', credentialData, nodeData)
return await E2BTool.initialize({
description: toolDesc ?? DESC,
name: toolName ?? NAME,
apiKey: e2bApiKey,
schema: z.object({
input: z.string().describe('Python code to be executed in the sandbox environment')
}),
chatflowid: options.chatflowid
})
}
}
type E2BToolParams = ToolParams
type E2BToolInput = {
name: string
description: string
apiKey: string
schema: any
chatflowid: string
templateCodeInterpreterE2B?: string
domainCodeInterpreterE2B?: string
}
export class E2BTool extends StructuredTool {
static lc_name() {
return 'E2BTool'
}
name = NAME
description = DESC
instance: CodeInterpreter
apiKey: string
schema
chatflowid: string
flowObj: ICommonObject
templateCodeInterpreterE2B?: string
domainCodeInterpreterE2B?: string
constructor(options: E2BToolParams & E2BToolInput) {
super(options)
this.description = options.description
this.name = options.name
this.apiKey = options.apiKey
this.schema = options.schema
this.chatflowid = options.chatflowid
this.templateCodeInterpreterE2B = options.templateCodeInterpreterE2B
this.domainCodeInterpreterE2B = options.domainCodeInterpreterE2B
}
static async initialize(options: Partial<E2BToolParams> & E2BToolInput) {
return new this({
name: options.name,
description: options.description,
apiKey: options.apiKey,
schema: options.schema,
chatflowid: options.chatflowid,
templateCodeInterpreterE2B: options.templateCodeInterpreterE2B,
domainCodeInterpreterE2B: options.domainCodeInterpreterE2B
})
}
async call(
arg: z.infer<typeof this.schema>,
configArg?: RunnableConfig | Callbacks,
tags?: string[],
flowConfig?: { sessionId?: string; chatId?: string; input?: string; state?: ICommonObject }
): Promise<string> {
const config = parseCallbackConfigArg(configArg)
if (config.runName === undefined) {
config.runName = this.name
}
let parsed
try {
parsed = await this.schema.parseAsync(arg)
} catch (e) {
throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg))
}
const callbackManager_ = await CallbackManager.configure(
config.callbacks,
this.callbacks,
config.tags || tags,
this.tags,
config.metadata,
this.metadata,
{ verbose: this.verbose }
)
const runManager = await callbackManager_?.handleToolStart(
this.toJSON(),
typeof parsed === 'string' ? parsed : JSON.stringify(parsed),
undefined,
undefined,
undefined,
undefined,
config.runName
)
let result
try {
result = await this._call(parsed, runManager, flowConfig)
} catch (e) {
await runManager?.handleToolError(e)
throw e
}
if (result && typeof result !== 'string') {
result = JSON.stringify(result)
}
await runManager?.handleToolEnd(result)
return result
}
// @ts-ignore
protected async _call(
arg: z.infer<typeof this.schema>,
_?: CallbackManagerForToolRun,
flowConfig?: { sessionId?: string; chatId?: string; input?: string }
): Promise<string> {
flowConfig = { ...this.flowObj, ...flowConfig }
try {
if ('input' in arg) {
this.instance = await CodeInterpreter.create({ apiKey: this.apiKey })
const execution = await this.instance.notebook.execCell(arg?.input)
const artifacts = []
for (const result of execution.results) {
for (const key in result) {
if (!(result as any)[key]) continue
if (key === 'png') {
//@ts-ignore
const pngData = Buffer.from(result.png, 'base64')
const filename = `artifact_${Date.now()}.png`
const res = await addSingleFileToStorage(
'image/png',
pngData,
filename,
this.chatflowid,
flowConfig!.chatId as string
)
artifacts.push({ type: 'png', data: res })
} else if (key === 'jpeg') {
//@ts-ignore
const jpegData = Buffer.from(result.jpeg, 'base64')
const filename = `artifact_${Date.now()}.jpg`
const res = await addSingleFileToStorage(
'image/jpeg',
jpegData,
filename,
this.chatflowid,
flowConfig!.chatId as string
)
artifacts.push({ type: 'jpeg', data: res })
} else if (key === 'html' || key === 'markdown' || key === 'latex' || key === 'json' || key === 'javascript') {
artifacts.push({ type: key, data: (result as any)[key] })
} //TODO: support for pdf
}
}
this.instance.close()
let output = ''
if (execution.text) output = execution.text
if (!execution.text && execution.logs.stdout.length) output = execution.logs.stdout.join('\n')
if (execution.error) {
return `${execution.error.name}: ${execution.error.value}`
}
return artifacts.length > 0 ? output + ARTIFACTS_PREFIX + JSON.stringify(artifacts) : output
} else {
return 'No input provided'
}
} catch (e) {
if (this.instance) this.instance.close()
return typeof e === 'string' ? e : JSON.stringify(e, null, 2)
}
}
setFlowObject(flowObj: ICommonObject) {
this.flowObj = flowObj
}
}
module.exports = { nodeClass: Code_Interpreter_Tools }
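
For reference, a minimal sketch of how the new E2BTool could be exercised on its own; the API key, chatflow id and chat id are placeholders, and the snippet assumes the same module scope and imports as the file above:

const tool = await E2BTool.initialize({
    name: NAME,
    description: DESC,
    apiKey: process.env.E2B_API_KEY as string, // placeholder credential
    schema: z.object({ input: z.string().describe('Python code to be executed in the sandbox environment') }),
    chatflowid: 'example-chatflow-id' // placeholder
})
tool.setFlowObject({ chatId: 'example-chat-id', sessionId: 'example-session-id' })

// Returns the cell output; when the sandbox produced images or rich output, the string ends with
// ARTIFACTS_PREFIX + JSON.stringify([{ type: 'png', data: '<stored file name>' }, ...])
const output = await tool.call({ input: 'import matplotlib.pyplot as plt\nplt.plot([1, 2, 3])\nplt.show()' })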


(binary image file; 9.0 KiB before and after)


@ -1,151 +0,0 @@
/*
* TODO: Implement codeInterpreter column to chat_message table
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { StructuredTool, ToolParams } from '@langchain/core/tools'
import { CodeInterpreter } from '@e2b/code-interpreter'
import { z } from 'zod'
const DESC = `Evaluates python code in a sandbox environment. \
The environment is long running and exists across multiple executions. \
You must send the whole script every time and print your outputs. \
Script should be pure python code that can be evaluated. \
It should be in python format NOT markdown. \
The code should NOT be wrapped in backticks. \
All python packages including requests, matplotlib, scipy, numpy, pandas, \
etc are available. Create and display chart using "plt.show()".`
const NAME = 'code_interpreter'
class E2B_Tools implements INode {
label: string
name: string
version: number
description: string
type: string
icon: string
category: string
baseClasses: string[]
inputs: INodeParams[]
badge: string
credential: INodeParams
constructor() {
this.label = 'E2B'
this.name = 'e2b'
this.version = 1.0
this.type = 'E2B'
this.icon = 'e2b.png'
this.category = 'Tools'
this.description = 'Execute code in E2B Code Intepreter'
this.baseClasses = [this.type, 'Tool', ...getBaseClasses(E2BTool)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['E2BApi']
}
this.inputs = [
{
label: 'Tool Name',
name: 'toolName',
type: 'string',
description: 'Specify the name of the tool',
default: 'code_interpreter'
},
{
label: 'Tool Description',
name: 'toolDesc',
type: 'string',
rows: 4,
description: 'Specify the description of the tool',
default: DESC
}
]
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const toolDesc = nodeData.inputs?.toolDesc as string
const toolName = nodeData.inputs?.toolName as string
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const e2bApiKey = getCredentialParam('e2bApiKey', credentialData, nodeData)
const socketIO = options.socketIO
const socketIOClientId = options.socketIOClientId
return await E2BTool.initialize({
description: toolDesc ?? DESC,
name: toolName ?? NAME,
apiKey: e2bApiKey,
schema: z.object({
input: z.string().describe('Python code to be executed in the sandbox environment')
}),
socketIO,
socketIOClientId
})
}
}
type E2BToolParams = ToolParams & { instance: CodeInterpreter }
export class E2BTool extends StructuredTool {
static lc_name() {
return 'E2BTool'
}
name = NAME
description = DESC
instance: CodeInterpreter
apiKey: string
schema
socketIO
socketIOClientId = ''
constructor(options: E2BToolParams & { name: string; description: string, apiKey: string, schema: any, socketIO: any, socketIOClientId: string}) {
super(options)
this.instance = options.instance
this.description = options.description
this.name = options.name
this.apiKey = options.apiKey
this.schema = options.schema
this.returnDirect = true
this.socketIO = options.socketIO
this.socketIOClientId = options.socketIOClientId
}
static async initialize(options: Partial<E2BToolParams> & { name: string; description: string, apiKey: string, schema: any, socketIO: any, socketIOClientId: string }) {
const instance = await CodeInterpreter.create({ apiKey: options.apiKey })
return new this({ instance, name: options.name, description: options.description, apiKey: options.apiKey, schema: options.schema, socketIO: options.socketIO, socketIOClientId: options.socketIOClientId})
}
async _call(args: any) {
try {
if ('input' in args) {
const execution = await this.instance.notebook.execCell(args?.input)
let imgHTML = ''
for (const result of execution.results) {
if (result.png) {
imgHTML += `\n\n<img src="data:image/png;base64,${result.png}" width="100%" height="max-content" alt="image" /><br/>`
}
if (result.jpeg) {
imgHTML += `\n\n<img src="data:image/jpeg;base64,${result.jpeg}" width="100%" height="max-content" alt="image" /><br/>`
}
}
const output = execution.text ? execution.text + imgHTML : imgHTML
if (this.socketIO && this.socketIOClientId) this.socketIO.to(this.socketIOClientId).emit('token', output)
return output
} else {
return 'No input provided'
}
} catch (e) {
return typeof e === 'string' ? e : JSON.stringify(e, null, 2)
}
}
}
module.exports = { nodeClass: E2B_Tools }
*/


@ -1,127 +0,0 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { loadPyodide, type PyodideInterface } from 'pyodide'
import { Tool, ToolParams } from '@langchain/core/tools'
import * as path from 'path'
import { getUserHome } from '../../../src/utils'
let pyodideInstance: PyodideInterface | undefined
const DESC = `Evaluates python code in a sandbox environment. The environment resets on every execution. You must send the whole script every time and print your outputs. Script should be pure python code that can be evaluated. Use only packages available in Pyodide.`
const NAME = 'python_interpreter'
async function LoadPyodide(): Promise<PyodideInterface> {
if (pyodideInstance === undefined) {
const obj = { packageCacheDir: path.join(getUserHome(), '.flowise', 'pyodideCacheDir') }
pyodideInstance = await loadPyodide(obj)
}
return pyodideInstance
}
class PythonInterpreter_Tools implements INode {
label: string
name: string
version: number
description: string
type: string
icon: string
category: string
baseClasses: string[]
inputs: INodeParams[]
badge: string
constructor() {
this.label = 'Python Interpreter'
this.name = 'pythonInterpreter'
this.version = 1.0
this.type = 'PythonInterpreter'
this.icon = 'python.svg'
this.category = 'Tools'
this.description = 'Execute python code in Pyodide sandbox environment'
this.baseClasses = [this.type, 'Tool', ...getBaseClasses(PythonInterpreterTool)]
this.inputs = [
{
label: 'Tool Name',
name: 'toolName',
type: 'string',
description: 'Specify the name of the tool',
default: 'python_interpreter'
},
{
label: 'Tool Description',
name: 'toolDesc',
type: 'string',
rows: 4,
description: 'Specify the description of the tool',
default: DESC
}
]
}
async init(nodeData: INodeData): Promise<any> {
const toolDesc = nodeData.inputs?.toolDesc as string
const toolName = nodeData.inputs?.toolName as string
return await PythonInterpreterTool.initialize({
description: toolDesc ?? DESC,
name: toolName ?? NAME
})
}
}
type PythonInterpreterToolParams = Parameters<typeof loadPyodide>[0] &
ToolParams & {
instance: PyodideInterface
}
export class PythonInterpreterTool extends Tool {
static lc_name() {
return 'PythonInterpreterTool'
}
name = NAME
description = DESC
pyodideInstance: PyodideInterface
stdout = ''
stderr = ''
constructor(options: PythonInterpreterToolParams & { name: string; description: string }) {
super(options)
this.description = options.description
this.name = options.name
this.pyodideInstance = options.instance
this.pyodideInstance.setStderr({
batched: (text: string) => {
this.stderr += text
}
})
this.pyodideInstance.setStdout({
batched: (text: string) => {
this.stdout += text
}
})
}
static async initialize(options: Partial<PythonInterpreterToolParams> & { name: string; description: string }) {
const instance = await LoadPyodide()
return new this({ instance, name: options.name, description: options.description })
}
async _call(script: string) {
this.stdout = ''
this.stderr = ''
try {
await this.pyodideInstance.loadPackagesFromImports(script)
await this.pyodideInstance.runPythonAsync(script)
return JSON.stringify({ stdout: this.stdout, stderr: this.stderr }, null, 2)
} catch (e) {
return typeof e === 'string' ? e : JSON.stringify(e, null, 2)
}
}
}
module.exports = { nodeClass: PythonInterpreter_Tools }


@ -1 +0,0 @@
<svg class="mr-1.5" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M15.84.5a16.4,16.4,0,0,0-3.57.32C9.1,1.39,8.53,2.53,8.53,4.64V7.48H16v1H5.77a4.73,4.73,0,0,0-4.7,3.74,14.82,14.82,0,0,0,0,7.54c.57,2.28,1.86,3.82,4,3.82h2.6V20.14a4.73,4.73,0,0,1,4.63-4.63h7.38a3.72,3.72,0,0,0,3.73-3.73V4.64A4.16,4.16,0,0,0,19.65.82,20.49,20.49,0,0,0,15.84.5ZM11.78,2.77a1.39,1.39,0,0,1,1.38,1.46,1.37,1.37,0,0,1-1.38,1.38A1.42,1.42,0,0,1,10.4,4.23,1.44,1.44,0,0,1,11.78,2.77Z" fill="#5a9fd4"></path><path d="M16.16,31.5a16.4,16.4,0,0,0,3.57-.32c3.17-.57,3.74-1.71,3.74-3.82V24.52H16v-1H26.23a4.73,4.73,0,0,0,4.7-3.74,14.82,14.82,0,0,0,0-7.54c-.57-2.28-1.86-3.82-4-3.82h-2.6v3.41a4.73,4.73,0,0,1-4.63,4.63H12.35a3.72,3.72,0,0,0-3.73,3.73v7.14a4.16,4.16,0,0,0,3.73,3.82A20.49,20.49,0,0,0,16.16,31.5Zm4.06-2.27a1.39,1.39,0,0,1-1.38-1.46,1.37,1.37,0,0,1,1.38-1.38,1.42,1.42,0,0,1,1.38,1.38A1.44,1.44,0,0,1,20.22,29.23Z" fill="#ffd43b"></path></svg>

(deleted SVG icon; 1.1 KiB)


@ -415,7 +415,7 @@ export interface IServerSideEventStreamer {
streamAgentReasoningEvent(chatId: string, data: any): void
streamNextAgentEvent(chatId: string, data: any): void
streamActionEvent(chatId: string, data: any): void
streamArtifactsEvent(chatId: string, data: any): void
streamAbortEvent(chatId: string): void
streamEndEvent(chatId: string): void
}


@ -26,6 +26,8 @@ import { formatLogToString } from 'langchain/agents/format_scratchpad/log'
import { IUsedTool } from './Interface'
export const SOURCE_DOCUMENTS_PREFIX = '\n\n----FLOWISE_SOURCE_DOCUMENTS----\n\n'
export const ARTIFACTS_PREFIX = '\n\n----FLOWISE_ARTIFACTS----\n\n'
export type AgentFinish = {
returnValues: Record<string, any>
log: string
@ -345,12 +347,14 @@ export class AgentExecutor extends BaseChain<ChainValues, AgentExecutorOutput> {
let iterations = 0
let sourceDocuments: Array<Document> = []
const usedTools: IUsedTool[] = []
let artifacts: any[] = []
const getOutput = async (finishStep: AgentFinish): Promise<AgentExecutorOutput> => {
const { returnValues } = finishStep
const additional = await this.agent.prepareForOutput(returnValues, steps)
if (sourceDocuments.length) additional.sourceDocuments = flatten(sourceDocuments)
if (usedTools.length) additional.usedTools = usedTools
if (artifacts.length) additional.artifacts = flatten(artifacts)
if (this.returnIntermediateSteps) {
return { ...returnValues, intermediateSteps: steps, ...additional }
}
@ -426,13 +430,17 @@ export class AgentExecutor extends BaseChain<ChainValues, AgentExecutorOutput> {
state: inputs
}
)
let toolOutput = observation
if (typeof toolOutput === 'string' && toolOutput.includes(SOURCE_DOCUMENTS_PREFIX)) {
toolOutput = toolOutput.split(SOURCE_DOCUMENTS_PREFIX)[0]
}
if (typeof toolOutput === 'string' && toolOutput.includes(ARTIFACTS_PREFIX)) {
toolOutput = toolOutput.split(ARTIFACTS_PREFIX)[0]
}
usedTools.push({
tool: tool.name,
toolInput: action.toolInput as any,
toolOutput:
typeof observation === 'string' && observation.includes(SOURCE_DOCUMENTS_PREFIX)
? observation.split(SOURCE_DOCUMENTS_PREFIX)[0]
: observation
toolOutput
})
} else {
observation = `${action.tool} is not a valid tool, try another one.`
@ -463,6 +471,16 @@ export class AgentExecutor extends BaseChain<ChainValues, AgentExecutorOutput> {
console.error('Error parsing source documents from tool')
}
}
if (typeof observation === 'string' && observation.includes(ARTIFACTS_PREFIX)) {
const observationArray = observation.split(ARTIFACTS_PREFIX)
observation = observationArray[0]
try {
const artifact = JSON.parse(observationArray[1])
artifacts.push(artifact)
} catch (e) {
console.error('Error parsing artifacts from tool')
}
}
return { action, observation: observation ?? '' }
})
)
@ -566,6 +584,10 @@ export class AgentExecutor extends BaseChain<ChainValues, AgentExecutorOutput> {
const observationArray = observation.split(SOURCE_DOCUMENTS_PREFIX)
observation = observationArray[0]
}
if (typeof observation === 'string' && observation.includes(ARTIFACTS_PREFIX)) {
const observationArray = observation.split(ARTIFACTS_PREFIX)
observation = observationArray[0]
}
} catch (e) {
if (e instanceof ToolInputParsingException) {
if (this.handleParsingErrors === true) {


@ -42,6 +42,7 @@ export interface IChatMessage {
fileAnnotations?: string
agentReasoning?: string
fileUploads?: string
artifacts?: string
chatType: string
chatId: string
memoryType?: string


@ -209,6 +209,9 @@ const parseAPIResponse = (apiResponse: ChatMessage | ChatMessage[]): ChatMessage
if (parsedResponse.action) {
parsedResponse.action = JSON.parse(parsedResponse.action)
}
if (parsedResponse.artifacts) {
parsedResponse.artifacts = JSON.parse(parsedResponse.artifacts)
}
return parsedResponse
}


@ -32,6 +32,9 @@ export class ChatMessage implements IChatMessage {
@Column({ nullable: true, type: 'text' })
fileUploads?: string
@Column({ nullable: true, type: 'text' })
artifacts?: string
@Column({ nullable: true, type: 'text' })
action?: string | null


@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
export class AddArtifactsToChatMessage1726156258465 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`ALTER TABLE "chat_message" ADD COLUMN "artifacts" TEXT;`)
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`ALTER TABLE "chat_message" DROP COLUMN "artifacts";`)
}
}


@ -23,6 +23,7 @@ import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-Add
import { AddTypeToChatFlow1716300000000 } from './1716300000000-AddTypeToChatFlow'
import { AddApiKey1720230151480 } from './1720230151480-AddApiKey'
import { AddActionToChatMessage1721078251523 } from './1721078251523-AddActionToChatMessage'
import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage'
import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplate'
export const sqliteMigrations = [
@ -51,5 +52,6 @@ export const sqliteMigrations = [
AddVectorStoreConfigToDocStore1715861032479,
AddApiKey1720230151480,
AddActionToChatMessage1721078251523,
AddArtifactsToChatMessage1726156258465,
AddCustomTemplate1725629836652
]


@ -97,6 +97,16 @@ export class SSEStreamer implements IServerSideEventStreamer {
client.response.write('message:\ndata:' + JSON.stringify(clientResponse) + '\n\n')
}
}
streamArtifactsEvent(chatId: string, data: any) {
const client = this.clients[chatId]
if (client) {
const clientResponse = {
event: 'artifacts',
data: data
}
client.response.write('message:\ndata:' + JSON.stringify(clientResponse) + '\n\n')
}
}
streamUsedToolsEvent(chatId: string, data: any): void {
const client = this.clients[chatId]
if (client) {

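On the client, the new event is just one more case in the SSE message handler (the ChatMessage component further down does exactly that). A hedged sketch of a standalone consumer, assuming the @microsoft/fetch-event-source client that the openWhenHidden fix above points to; the endpoint path and callback name are illustrative, not part of this diff:

import { fetchEventSource } from '@microsoft/fetch-event-source'

async function streamPrediction(chatflowid: string, question: string, onArtifacts: (artifacts: unknown[]) => void) {
    await fetchEventSource(`/api/v1/internal-prediction/${chatflowid}`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ question }),
        openWhenHidden: true, // keep the stream alive while the tab is in the background
        onmessage(ev) {
            if (!ev.data) return
            // each frame carries { event: 'start' | 'token' | 'artifacts' | ..., data: ... }
            const payload = JSON.parse(ev.data)
            if (payload.event === 'artifacts') onArtifacts(payload.data) // array of { type, data }
        }
    })
}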

@ -154,6 +154,7 @@ export const buildAgentGraph = async (
let finalAction: IAction = {}
let totalSourceDocuments: IDocument[] = []
let totalUsedTools: IUsedTool[] = []
let totalArtifacts: ICommonObject[] = []
const workerNodes = reactFlowNodes.filter((node) => node.data.name === 'worker')
const supervisorNodes = reactFlowNodes.filter((node) => node.data.name === 'supervisor')
@ -221,6 +222,9 @@ export const buildAgentGraph = async (
const sourceDocuments = output[agentName]?.messages
? output[agentName].messages.map((msg: BaseMessage) => msg.additional_kwargs?.sourceDocuments)
: []
const artifacts = output[agentName]?.messages
? output[agentName].messages.map((msg: BaseMessage) => msg.additional_kwargs?.artifacts)
: []
const messages = output[agentName]?.messages
? output[agentName].messages.map((msg: BaseMessage) => (typeof msg === 'string' ? msg : msg.content))
: []
@ -240,6 +244,11 @@ export const buildAgentGraph = async (
if (cleanedDocs.length) totalSourceDocuments.push(...cleanedDocs)
}
if (artifacts && artifacts.length) {
const cleanedArtifacts = artifacts.filter((artifact: ICommonObject) => artifact)
if (cleanedArtifacts.length) totalArtifacts.push(...cleanedArtifacts)
}
/*
* Check if the next node is a condition node, if yes, then add the agent reasoning of the condition node
*/
@ -273,6 +282,7 @@ export const buildAgentGraph = async (
instructions: output[agentName]?.instructions,
usedTools: flatten(usedTools) as IUsedTool[],
sourceDocuments: flatten(sourceDocuments) as Document[],
artifacts: flatten(artifacts) as ICommonObject[],
state,
nodeName: isSequential ? mapNameToLabel[agentName].nodeName : undefined,
nodeId
@ -395,10 +405,12 @@ export const buildAgentGraph = async (
totalSourceDocuments = uniq(flatten(totalSourceDocuments))
totalUsedTools = uniq(flatten(totalUsedTools))
totalArtifacts = uniq(flatten(totalArtifacts))
if (shouldStreamResponse && sseStreamer) {
sseStreamer.streamUsedToolsEvent(chatId, totalUsedTools)
sseStreamer.streamSourceDocumentsEvent(chatId, totalSourceDocuments)
sseStreamer.streamArtifactsEvent(chatId, totalArtifacts)
sseStreamer.streamEndEvent(chatId)
}
@ -406,6 +418,7 @@ export const buildAgentGraph = async (
finalResult,
finalAction,
sourceDocuments: totalSourceDocuments,
artifacts: totalArtifacts,
usedTools: totalUsedTools,
agentReasoning
}


@ -420,6 +420,8 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals
if (result?.sourceDocuments) apiMessage.sourceDocuments = JSON.stringify(result.sourceDocuments)
if (result?.usedTools) apiMessage.usedTools = JSON.stringify(result.usedTools)
if (result?.fileAnnotations) apiMessage.fileAnnotations = JSON.stringify(result.fileAnnotations)
if (result?.artifacts) apiMessage.artifacts = JSON.stringify(result.artifacts)
const chatMessage = await utilAddChatMessage(apiMessage)
logger.debug(`[server]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
@ -481,7 +483,7 @@ const utilBuildAgentResponse = async (
shouldStreamResponse
)
if (streamResults) {
const { finalResult, finalAction, sourceDocuments, usedTools, agentReasoning } = streamResults
const { finalResult, finalAction, sourceDocuments, artifacts, usedTools, agentReasoning } = streamResults
const userMessage: Omit<IChatMessage, 'id'> = {
role: 'userMessage',
content: incomingInput.question,
@ -506,6 +508,7 @@ const utilBuildAgentResponse = async (
sessionId
}
if (sourceDocuments?.length) apiMessage.sourceDocuments = JSON.stringify(sourceDocuments)
if (artifacts?.length) apiMessage.artifacts = JSON.stringify(artifacts)
if (usedTools?.length) apiMessage.usedTools = JSON.stringify(usedTools)
if (agentReasoning?.length) apiMessage.agentReasoning = JSON.stringify(agentReasoning)
if (finalAction && Object.keys(finalAction).length) apiMessage.action = JSON.stringify(finalAction)


@ -8,6 +8,7 @@ import rehypeRaw from 'rehype-raw'
import remarkGfm from 'remark-gfm'
import remarkMath from 'remark-math'
import axios from 'axios'
import { cloneDeep } from 'lodash'
// material-ui
import {
@ -207,7 +208,16 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => {
if (chatmsg.fileAnnotations) msg.fileAnnotations = chatmsg.fileAnnotations
if (chatmsg.feedback) msg.feedback = chatmsg.feedback?.content
if (chatmsg.agentReasoning) msg.agentReasoning = chatmsg.agentReasoning
if (chatmsg.artifacts) {
obj.artifacts = chatmsg.artifacts
obj.artifacts.forEach((artifact) => {
if (artifact.type === 'png' || artifact.type === 'jpeg') {
artifact.data = `${baseURL}/api/v1/get-upload-file?chatflowId=${chatmsg.chatflowid}&chatId=${
chatmsg.chatId
}&fileName=${artifact.data.replace('FILE-STORAGE::', '')}`
}
})
}
if (!Object.prototype.hasOwnProperty.call(obj, chatPK)) {
obj[chatPK] = {
id: chatmsg.chatId,
@ -341,7 +351,16 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => {
if (chatmsg.usedTools) obj.usedTools = chatmsg.usedTools
if (chatmsg.fileAnnotations) obj.fileAnnotations = chatmsg.fileAnnotations
if (chatmsg.agentReasoning) obj.agentReasoning = chatmsg.agentReasoning
if (chatmsg.artifacts) {
obj.artifacts = chatmsg.artifacts
obj.artifacts.forEach((artifact) => {
if (artifact.type === 'png' || artifact.type === 'jpeg') {
artifact.data = `${baseURL}/api/v1/get-upload-file?chatflowId=${chatmsg.chatflowid}&chatId=${
chatmsg.chatId
}&fileName=${artifact.data.replace('FILE-STORAGE::', '')}`
}
})
}
loadedMessages.push(obj)
}
setChatMessages(loadedMessages)
@ -574,6 +593,83 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [feedbackTypeFilter])
const agentReasoningArtifacts = (artifacts) => {
const newArtifacts = cloneDeep(artifacts)
for (let i = 0; i < newArtifacts.length; i++) {
const artifact = newArtifacts[i]
if (artifact && (artifact.type === 'png' || artifact.type === 'jpeg')) {
const data = artifact.data
newArtifacts[i].data = `${baseURL}/api/v1/get-upload-file?chatflowId=${
dialogProps.chatflow.id
}&chatId=${selectedChatId}&fileName=${data.replace('FILE-STORAGE::', '')}`
}
}
return newArtifacts
}
const renderArtifacts = (item, index, isAgentReasoning) => {
if (item.type === 'png' || item.type === 'jpeg') {
return (
<Card
key={index}
sx={{
p: 0,
m: 0,
mt: 2,
mb: 2,
flex: '0 0 auto'
}}
>
<CardMedia
component='img'
image={item.data}
sx={{ height: 'auto' }}
alt={'artifact'}
style={{
width: isAgentReasoning ? '200px' : '100%',
height: isAgentReasoning ? '200px' : 'auto',
objectFit: 'cover'
}}
/>
</Card>
)
} else if (item.type === 'html') {
return (
<div style={{ marginTop: '20px' }}>
<div dangerouslySetInnerHTML={{ __html: item.data }}></div>
</div>
)
} else {
return (
<MemoizedReactMarkdown
remarkPlugins={[remarkGfm, remarkMath]}
rehypePlugins={[rehypeMathjax, rehypeRaw]}
components={{
code({ inline, className, children, ...props }) {
const match = /language-(\w+)/.exec(className || '')
return !inline ? (
<CodeBlock
key={Math.random()}
chatflowid={dialogProps.chatflow.id}
isDialog={true}
language={(match && match[1]) || ''}
value={String(children).replace(/\n$/, '')}
{...props}
/>
) : (
<code className={className} {...props}>
{children}
</code>
)
}
}}
>
{item.data}
</MemoizedReactMarkdown>
)
}
}
const component = show ? (
<Dialog
onClose={onCancel}
@ -879,24 +975,6 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => {
width: '100%'
}}
>
{message.usedTools && (
<div style={{ display: 'block', flexDirection: 'row', width: '100%' }}>
{message.usedTools.map((tool, index) => {
return (
<Chip
size='small'
key={index}
label={tool.tool}
component='a'
sx={{ mr: 1, mt: 1 }}
variant='outlined'
clickable
onClick={() => onSourceDialogClick(tool, 'Used Tools')}
/>
)
})}
</div>
)}
{message.fileUploads && message.fileUploads.length > 0 && (
<div
style={{
@ -1008,6 +1086,31 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => {
/>
</div>
)}
{agent.artifacts && (
<div
style={{
display: 'flex',
flexWrap: 'wrap',
flexDirection: 'row',
width: '100%',
gap: '8px'
}}
>
{agentReasoningArtifacts(
agent.artifacts
).map((item, index) => {
return item !== null ? (
<>
{renderArtifacts(
item,
index,
true
)}
</>
) : null
})}
</div>
)}
{agent.messages.length > 0 && (
<MemoizedReactMarkdown
remarkPlugins={[remarkGfm, remarkMath]}
@ -1025,8 +1128,10 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => {
return !inline ? (
<CodeBlock
key={Math.random()}
chatflowid={chatflowid}
isDialog={isDialog}
chatflowid={
dialogProps.chatflow.id
}
isDialog={true}
language={
(match && match[1]) ||
''
@ -1122,6 +1227,40 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => {
})}
</div>
)}
{message.usedTools && (
<div style={{ display: 'block', flexDirection: 'row', width: '100%' }}>
{message.usedTools.map((tool, index) => {
return (
<Chip
size='small'
key={index}
label={tool.tool}
component='a'
sx={{ mr: 1, mt: 1 }}
variant='outlined'
clickable
onClick={() => onSourceDialogClick(tool, 'Used Tools')}
/>
)
})}
</div>
)}
{message.artifacts && (
<div
style={{
display: 'flex',
flexWrap: 'wrap',
flexDirection: 'column',
width: '100%'
}}
>
{message.artifacts.map((item, index) => {
return item !== null ? (
<>{renderArtifacts(item, index)}</>
) : null
})}
</div>
)}
<div className='markdownanswer'>
{/* Messages are being rendered in Markdown format */}
<MemoizedReactMarkdown


@ -534,6 +534,23 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
})
}
const updateLastMessageArtifacts = (artifacts) => {
artifacts.forEach((artifact) => {
if (artifact.type === 'png' || artifact.type === 'jpeg') {
artifact.data = `${baseURL}/api/v1/get-upload-file?chatflowId=${chatflowid}&chatId=${chatId}&fileName=${artifact.data.replace(
'FILE-STORAGE::',
''
)}`
}
})
setMessages((prevMessages) => {
let allMessages = [...cloneDeep(prevMessages)]
if (allMessages[allMessages.length - 1].type === 'userMessage') return allMessages
allMessages[allMessages.length - 1].artifacts = artifacts
return allMessages
})
}
const updateLastMessageNextAgent = (nextAgent) => {
setMessages((prevMessages) => {
let allMessages = [...cloneDeep(prevMessages)]
@ -730,6 +747,7 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
fileAnnotations: data?.fileAnnotations,
agentReasoning: data?.agentReasoning,
action: data?.action,
artifacts: data?.artifacts,
type: 'apiMessage',
feedback: null
}
@ -792,6 +810,9 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
case 'agentReasoning':
updateLastMessageAgentReasoning(payload.data)
break
case 'artifacts':
updateLastMessageArtifacts(payload.data)
break
case 'action':
updateLastMessageAction(payload.data)
break
@ -913,6 +934,17 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
if (message.fileAnnotations) obj.fileAnnotations = message.fileAnnotations
if (message.agentReasoning) obj.agentReasoning = message.agentReasoning
if (message.action) obj.action = message.action
if (message.artifacts) {
obj.artifacts = message.artifacts
obj.artifacts.forEach((artifact) => {
if (artifact.type === 'png' || artifact.type === 'jpeg') {
artifact.data = `${baseURL}/api/v1/get-upload-file?chatflowId=${chatflowid}&chatId=${chatId}&fileName=${artifact.data.replace(
'FILE-STORAGE::',
''
)}`
}
})
}
if (message.fileUploads) {
obj.fileUploads = message.fileUploads
obj.fileUploads.forEach((file) => {
@ -1260,6 +1292,84 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
}
}
const agentReasoningArtifacts = (artifacts) => {
const newArtifacts = cloneDeep(artifacts)
for (let i = 0; i < newArtifacts.length; i++) {
const artifact = newArtifacts[i]
if (artifact && (artifact.type === 'png' || artifact.type === 'jpeg')) {
const data = artifact.data
newArtifacts[i].data = `${baseURL}/api/v1/get-upload-file?chatflowId=${chatflowid}&chatId=${chatId}&fileName=${data.replace(
'FILE-STORAGE::',
''
)}`
}
}
return newArtifacts
}
const renderArtifacts = (item, index, isAgentReasoning) => {
if (item.type === 'png' || item.type === 'jpeg') {
return (
<Card
key={index}
sx={{
p: 0,
m: 0,
mt: 2,
mb: 2,
flex: '0 0 auto'
}}
>
<CardMedia
component='img'
image={item.data}
sx={{ height: 'auto' }}
alt={'artifact'}
style={{
width: isAgentReasoning ? '200px' : '100%',
height: isAgentReasoning ? '200px' : 'auto',
objectFit: 'cover'
}}
/>
</Card>
)
} else if (item.type === 'html') {
return (
<div style={{ marginTop: '20px' }}>
<div dangerouslySetInnerHTML={{ __html: item.data }}></div>
</div>
)
} else {
return (
<MemoizedReactMarkdown
remarkPlugins={[remarkGfm, remarkMath]}
rehypePlugins={[rehypeMathjax, rehypeRaw]}
components={{
code({ inline, className, children, ...props }) {
const match = /language-(\w+)/.exec(className || '')
return !inline ? (
<CodeBlock
key={Math.random()}
chatflowid={chatflowid}
isDialog={isDialog}
language={(match && match[1]) || ''}
value={String(children).replace(/\n$/, '')}
{...props}
/>
) : (
<code className={className} {...props}>
{children}
</code>
)
}
}}
>
{item.data}
</MemoizedReactMarkdown>
)
}
}
return (
<div onDragEnter={handleDrag}>
{isDragActive && (
@ -1459,6 +1569,23 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
/>
</div>
)}
{agent.artifacts && (
<div
style={{
display: 'flex',
flexWrap: 'wrap',
flexDirection: 'row',
width: '100%',
gap: '8px'
}}
>
{agentReasoningArtifacts(agent.artifacts).map((item, index) => {
return item !== null ? (
<>{renderArtifacts(item, index, true)}</>
) : null
})}
</div>
)}
{agent.messages.length > 0 && (
<MemoizedReactMarkdown
remarkPlugins={[remarkGfm, remarkMath]}
@ -1553,6 +1680,20 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
})}
</div>
)}
{message.artifacts && (
<div
style={{
display: 'flex',
flexWrap: 'wrap',
flexDirection: 'column',
width: '100%'
}}
>
{message.artifacts.map((item, index) => {
return item !== null ? <>{renderArtifacts(item, index)}</> : null
})}
</div>
)}
<div className='markdownanswer'>
{message.type === 'leadCaptureMessage' &&
!getLocalStorageChatflow(chatflowid)?.lead &&