Merge pull request #123 from FlowiseAI/feature/LocalAI

Feature/Add LocalAI: adds a ChatLocalAI chat-model node for running local LLMs (llama.cpp, gpt4all) through LocalAI, widens the Conversational and MRKL chat agents' model input from BaseChatModel to BaseLanguageModel, and updates the marketplace chatflows to match.
Henry Heng 2023-05-19 13:14:28 +01:00 committed by GitHub
commit faebc8a665
7 changed files with 263 additions and 171 deletions


@@ -1,10 +1,10 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor, InitializeAgentExecutorOptions } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { BaseChatModel } from 'langchain/chat_models/base'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { getBaseClasses } from '../../../src/utils'
import { AIChatMessage, HumanChatMessage } from 'langchain/schema'
import { BaseLanguageModel } from 'langchain/base_language'
class ConversationalAgent_Agents implements INode {
label: string
@@ -32,9 +32,9 @@ class ConversationalAgent_Agents implements INode {
list: true
},
{
label: 'Chat Model',
label: 'Language Model',
name: 'model',
type: 'BaseChatModel'
type: 'BaseLanguageModel'
},
{
label: 'Memory',
@@ -61,7 +61,7 @@ class ConversationalAgent_Agents implements INode {
}
async init(nodeData: INodeData): Promise<any> {
const model = nodeData.inputs?.model as BaseChatModel
const model = nodeData.inputs?.model as BaseLanguageModel
let tools = nodeData.inputs?.tools as Tool[]
tools = tools.flat()
const memory = nodeData.inputs?.memory as BaseChatMemory


@@ -1,8 +1,8 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { BaseChatModel } from 'langchain/chat_models/base'
import { getBaseClasses } from '../../../src/utils'
import { Tool } from 'langchain/tools'
import { BaseLanguageModel } from 'langchain/base_language'
class MRKLAgentChat_Agents implements INode {
label: string
@@ -30,15 +30,15 @@ class MRKLAgentChat_Agents implements INode {
list: true
},
{
label: 'Chat Model',
label: 'Language Model',
name: 'model',
type: 'BaseChatModel'
type: 'BaseLanguageModel'
}
]
}
async init(nodeData: INodeData): Promise<any> {
const model = nodeData.inputs?.model as BaseChatModel
const model = nodeData.inputs?.model as BaseLanguageModel
let tools = nodeData.inputs?.tools as Tool[]
tools = tools.flat()
const executor = await initializeAgentExecutorWithOptions(tools, model, {

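Both agent nodes above now type their model input as BaseLanguageModel rather than BaseChatModel, so any LangChain language model can be wired in, including the LLM-style OpenAIChat instance that the new ChatLocalAI node below returns. A minimal sketch of the widened contract outside Flowise; the tool, agent type, and prompt here are illustrative assumptions, not part of this PR:

import { initializeAgentExecutorWithOptions } from 'langchain/agents'
import { Calculator } from 'langchain/tools/calculator'
import { OpenAIChat } from 'langchain/llms/openai'
import { BaseLanguageModel } from 'langchain/base_language'

// Any BaseLanguageModel now satisfies the agents' model input, not just chat models.
const model: BaseLanguageModel = new OpenAIChat({ temperature: 0, openAIApiKey: 'sk-...' })
const tools = [new Calculator()]

async function main() {
    // initializeAgentExecutorWithOptions accepts a BaseLanguageModel; the agentType is illustrative.
    const executor = await initializeAgentExecutorWithOptions(tools, model, {
        agentType: 'chat-zero-shot-react-description'
    })
    const result = await executor.call({ input: 'What is 2 to the power of 10?' })
    console.log(result.output)
}

main()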

@@ -0,0 +1,92 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { OpenAIChat } from 'langchain/llms/openai'
import { OpenAIChatInput } from 'langchain/chat_models/openai'
class ChatLocalAI_ChatModels implements INode {
label: string
name: string
type: string
icon: string
category: string
description: string
baseClasses: string[]
inputs: INodeParams[]
constructor() {
this.label = 'ChatLocalAI'
this.name = 'chatLocalAI'
this.type = 'ChatLocalAI'
this.icon = 'localai.png'
this.category = 'Chat Models'
this.description = 'Use local LLMs like llama.cpp, gpt4all using LocalAI'
this.baseClasses = [this.type, ...getBaseClasses(OpenAIChat)]
this.inputs = [
{
label: 'Base Path',
name: 'basePath',
type: 'string',
placeholder: 'http://localhost:8080/v1'
},
{
label: 'Model Name',
name: 'modelName',
type: 'string',
placeholder: 'gpt4all-lora-quantized.bin'
},
{
label: 'Temperature',
name: 'temperature',
type: 'number',
default: 0.9,
optional: true
},
{
label: 'Max Tokens',
name: 'maxTokens',
type: 'number',
optional: true,
additionalParams: true
},
{
label: 'Top Probability',
name: 'topP',
type: 'number',
optional: true,
additionalParams: true
},
{
label: 'Timeout',
name: 'timeout',
type: 'number',
optional: true,
additionalParams: true
}
]
}
async init(nodeData: INodeData): Promise<any> {
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const maxTokens = nodeData.inputs?.maxTokens as string
const topP = nodeData.inputs?.topP as string
const timeout = nodeData.inputs?.timeout as string
const basePath = nodeData.inputs?.basePath as string
const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = {
temperature: parseFloat(temperature),
modelName,
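// dummy key to satisfy the OpenAI client; requests go to the LocalAI basePath below, not to api.openai.com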
openAIApiKey: 'sk-'
}
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (timeout) obj.timeout = parseInt(timeout, 10)
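// passing basePath as client configuration points the OpenAI client at the LocalAI server instead of api.openai.com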
const model = new OpenAIChat(obj, { basePath })
return model
}
}
module.exports = { nodeClass: ChatLocalAI_ChatModels }
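For reference, a sketch of what init() above builds at runtime, assuming a LocalAI server listening on http://localhost:8080/v1 that serves the model named in the placeholders; all values are illustrative:

import { OpenAIChat } from 'langchain/llms/openai'

// Mirrors ChatLocalAI.init(): an OpenAIChat whose requests are routed to LocalAI.
const model = new OpenAIChat(
    {
        modelName: 'gpt4all-lora-quantized.bin', // must match a model configured in LocalAI
        temperature: 0.9,
        openAIApiKey: 'sk-' // placeholder; never reaches api.openai.com
    },
    { basePath: 'http://localhost:8080/v1' } // OpenAI-compatible endpoint exposed by LocalAI
)

async function main() {
    const answer = await model.call('What is LocalAI?')
    console.log(answer)
}

main()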

Binary image file added (141 KiB); not shown.


@@ -41,7 +41,7 @@
},
{
"width": 300,
"height": 279,
"height": 278,
"id": "aiPlugin_1",
"position": {
"x": 1112.7719277555257,
@@ -127,7 +127,7 @@
},
{
"width": 300,
"height": 526,
"height": 524,
"id": "chatOpenAI_1",
"position": {
"x": 800.6881372203391,
@@ -258,15 +258,15 @@
},
{
"width": 300,
"height": 281,
"id": "mrklAgentChat_1",
"height": 280,
"id": "mrklAgentChat_2",
"position": {
"x": 1484.694717249578,
"y": 460.4139893408708
"x": 1503.5956978371041,
"y": 418.46259909490925
},
"type": "customNode",
"data": {
"id": "mrklAgentChat_1",
"id": "mrklAgentChat_2",
"label": "MRKL Agent for Chat Models",
"name": "mrklAgentChat",
"type": "AgentExecutor",
@@ -280,13 +280,13 @@
"name": "tools",
"type": "Tool",
"list": true,
"id": "mrklAgentChat_1-input-tools-Tool"
"id": "mrklAgentChat_2-input-tools-Tool"
},
{
"label": "Chat Model",
"label": "Language Model",
"name": "model",
"type": "BaseChatModel",
"id": "mrklAgentChat_1-input-model-BaseChatModel"
"type": "BaseLanguageModel",
"id": "mrklAgentChat_2-input-model-BaseLanguageModel"
}
],
"inputs": {
@@ -295,7 +295,7 @@
},
"outputAnchors": [
{
"id": "mrklAgentChat_1-output-mrklAgentChat-AgentExecutor|BaseChain|BaseLangChain",
"id": "mrklAgentChat_2-output-mrklAgentChat-AgentExecutor|BaseChain|BaseLangChain",
"name": "mrklAgentChat",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | BaseLangChain"
@@ -305,8 +305,8 @@
"selected": false
},
"positionAbsolute": {
"x": 1484.694717249578,
"y": 460.4139893408708
"x": 1503.5956978371041,
"y": 418.46259909490925
},
"selected": false,
"dragging": false
@@ -316,10 +316,10 @@
{
"source": "aiPlugin_1",
"sourceHandle": "aiPlugin_1-output-aiPlugin-AIPlugin|Tool|StructuredTool|BaseLangChain",
"target": "mrklAgentChat_1",
"targetHandle": "mrklAgentChat_1-input-tools-Tool",
"target": "mrklAgentChat_2",
"targetHandle": "mrklAgentChat_2-input-tools-Tool",
"type": "buttonedge",
"id": "aiPlugin_1-aiPlugin_1-output-aiPlugin-AIPlugin|Tool|StructuredTool|BaseLangChain-mrklAgentChat_1-mrklAgentChat_1-input-tools-Tool",
"id": "aiPlugin_1-aiPlugin_1-output-aiPlugin-AIPlugin|Tool|StructuredTool|BaseLangChain-mrklAgentChat_2-mrklAgentChat_2-input-tools-Tool",
"data": {
"label": ""
}
@@ -327,10 +327,10 @@
{
"source": "requestsPost_1",
"sourceHandle": "requestsPost_1-output-requestsPost-RequestsPost|Tool|StructuredTool|BaseLangChain",
"target": "mrklAgentChat_1",
"targetHandle": "mrklAgentChat_1-input-tools-Tool",
"target": "mrklAgentChat_2",
"targetHandle": "mrklAgentChat_2-input-tools-Tool",
"type": "buttonedge",
"id": "requestsPost_1-requestsPost_1-output-requestsPost-RequestsPost|Tool|StructuredTool|BaseLangChain-mrklAgentChat_1-mrklAgentChat_1-input-tools-Tool",
"id": "requestsPost_1-requestsPost_1-output-requestsPost-RequestsPost|Tool|StructuredTool|BaseLangChain-mrklAgentChat_2-mrklAgentChat_2-input-tools-Tool",
"data": {
"label": ""
}
@@ -338,10 +338,10 @@
{
"source": "requestsGet_1",
"sourceHandle": "requestsGet_1-output-requestsGet-RequestsGet|Tool|StructuredTool|BaseLangChain",
"target": "mrklAgentChat_1",
"targetHandle": "mrklAgentChat_1-input-tools-Tool",
"target": "mrklAgentChat_2",
"targetHandle": "mrklAgentChat_2-input-tools-Tool",
"type": "buttonedge",
"id": "requestsGet_1-requestsGet_1-output-requestsGet-RequestsGet|Tool|StructuredTool|BaseLangChain-mrklAgentChat_1-mrklAgentChat_1-input-tools-Tool",
"id": "requestsGet_1-requestsGet_1-output-requestsGet-RequestsGet|Tool|StructuredTool|BaseLangChain-mrklAgentChat_2-mrklAgentChat_2-input-tools-Tool",
"data": {
"label": ""
}
@@ -349,10 +349,10 @@
{
"source": "chatOpenAI_1",
"sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
"target": "mrklAgentChat_1",
"targetHandle": "mrklAgentChat_1-input-model-BaseChatModel",
"target": "mrklAgentChat_2",
"targetHandle": "mrklAgentChat_2-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-mrklAgentChat_1-mrklAgentChat_1-input-model-BaseChatModel",
"id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-mrklAgentChat_2-mrklAgentChat_2-input-model-BaseLanguageModel",
"data": {
"label": ""
}


@@ -3,7 +3,7 @@
"nodes": [
{
"width": 300,
"height": 526,
"height": 524,
"id": "chatOpenAI_1",
"position": {
"x": 56.646518061018355,
@@ -134,7 +134,7 @@
},
{
"width": 300,
"height": 279,
"height": 278,
"id": "serpAPI_1",
"position": {
"x": 436.94138168947336,
@@ -217,7 +217,7 @@
},
{
"width": 300,
"height": 378,
"height": 376,
"id": "bufferMemory_1",
"position": {
"x": 573.479796337051,
@@ -273,15 +273,15 @@
},
{
"width": 300,
"height": 385,
"id": "conversationalAgent_1",
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 1326.6941863827417,
"y": 252.6965991008108
"x": 1206.1996037716035,
"y": 227.39579577603587
},
"type": "customNode",
"data": {
"id": "conversationalAgent_1",
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"name": "conversationalAgent",
"type": "AgentExecutor",
@@ -296,7 +296,7 @@
"rows": 4,
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_1-input-systemMessage-string"
"id": "conversationalAgent_0-input-systemMessage-string"
},
{
"label": "Human Message",
@@ -305,7 +305,7 @@
"rows": 4,
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_1-input-humanMessage-string"
"id": "conversationalAgent_0-input-humanMessage-string"
}
],
"inputAnchors": [
@@ -314,19 +314,19 @@
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalAgent_1-input-tools-Tool"
"id": "conversationalAgent_0-input-tools-Tool"
},
{
"label": "Chat Model",
"label": "Language Model",
"name": "model",
"type": "BaseChatModel",
"id": "conversationalAgent_1-input-model-BaseChatModel"
"type": "BaseLanguageModel",
"id": "conversationalAgent_0-input-model-BaseLanguageModel"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalAgent_1-input-memory-BaseChatMemory"
"id": "conversationalAgent_0-input-memory-BaseChatMemory"
}
],
"inputs": {
@@ -338,7 +338,7 @@
},
"outputAnchors": [
{
"id": "conversationalAgent_1-output-conversationalAgent-AgentExecutor|BaseChain|BaseLangChain",
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|BaseLangChain",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | BaseLangChain"
@@ -347,11 +347,11 @@
"outputs": {},
"selected": false
},
"positionAbsolute": {
"x": 1326.6941863827417,
"y": 252.6965991008108
},
"selected": false,
"positionAbsolute": {
"x": 1206.1996037716035,
"y": 227.39579577603587
},
"dragging": false
}
],
@@ -359,10 +359,10 @@
{
"source": "calculator_1",
"sourceHandle": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain",
"target": "conversationalAgent_1",
"targetHandle": "conversationalAgent_1-input-tools-Tool",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "calculator_1-calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain-conversationalAgent_1-conversationalAgent_1-input-tools-Tool",
"id": "calculator_1-calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool",
"data": {
"label": ""
}
@@ -370,10 +370,10 @@
{
"source": "serpAPI_1",
"sourceHandle": "serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain",
"target": "conversationalAgent_1",
"targetHandle": "conversationalAgent_1-input-tools-Tool",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "serpAPI_1-serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain-conversationalAgent_1-conversationalAgent_1-input-tools-Tool",
"id": "serpAPI_1-serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool",
"data": {
"label": ""
}
@@ -381,10 +381,10 @@
{
"source": "chatOpenAI_1",
"sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
"target": "conversationalAgent_1",
"targetHandle": "conversationalAgent_1-input-model-BaseChatModel",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalAgent_1-conversationalAgent_1-input-model-BaseChatModel",
"id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
@@ -392,10 +392,10 @@
{
"source": "bufferMemory_1",
"sourceHandle": "bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"target": "conversationalAgent_1",
"targetHandle": "conversationalAgent_1-input-memory-BaseChatMemory",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory",
"type": "buttonedge",
"id": "bufferMemory_1-bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_1-conversationalAgent_1-input-memory-BaseChatMemory",
"id": "bufferMemory_1-bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory",
"data": {
"label": ""
}


@@ -1,89 +1,6 @@
{
"description": "Conversational Agent with ability to visit a website and extract information",
"nodes": [
{
"width": 300,
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 1527.4605987432717,
"y": 242.32866622482635
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"name": "conversationalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [
{
"label": "System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_0-input-systemMessage-string"
},
{
"label": "Human Message",
"name": "humanMessage",
"type": "string",
"rows": 4,
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_0-input-humanMessage-string"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalAgent_0-input-tools-Tool"
},
{
"label": "Chat Model",
"name": "model",
"type": "BaseChatModel",
"id": "conversationalAgent_0-input-model-BaseChatModel"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalAgent_0-input-memory-BaseChatMemory"
}
],
"inputs": {
"tools": ["{{webBrowser_0.data.instance}}"],
"model": "{{chatOpenAI_0.data.instance}}",
"memory": "{{bufferMemory_0.data.instance}}",
"systemMessage": "",
"humanMessage": ""
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|BaseLangChain",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"positionAbsolute": {
"x": 1527.4605987432717,
"y": 242.32866622482635
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 524,
@@ -528,20 +445,92 @@
"y": -16.43806989958216
},
"dragging": false
},
{
"width": 300,
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 1451.6222493253506,
"y": 239.69137914100338
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"name": "conversationalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [
{
"label": "System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_0-input-systemMessage-string"
},
{
"label": "Human Message",
"name": "humanMessage",
"type": "string",
"rows": 4,
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_0-input-humanMessage-string"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalAgent_0-input-tools-Tool"
},
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalAgent_0-input-model-BaseLanguageModel"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalAgent_0-input-memory-BaseChatMemory"
}
],
"inputs": {
"tools": ["{{webBrowser_0.data.instance}}"],
"model": "{{chatOpenAI_0.data.instance}}",
"memory": "{{bufferMemory_0.data.instance}}",
"systemMessage": "",
"humanMessage": ""
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|BaseLangChain",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1451.6222493253506,
"y": 239.69137914100338
},
"dragging": false
}
],
"edges": [
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory",
"type": "buttonedge",
"id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory",
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_1",
"sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
@@ -564,17 +553,6 @@
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "webBrowser_0",
"sourceHandle": "webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain",
@@ -585,6 +563,28 @@
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory",
"type": "buttonedge",
"id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory",
"data": {
"label": ""
}
}
]
}