add conversational retrieval agent

This commit is contained in:
Henry 2023-08-06 19:45:21 +01:00
parent d208eae868
commit 0ae6f53295
18 changed files with 1018 additions and 103 deletions

View File

@ -1,9 +1,8 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor, InitializeAgentExecutorOptions } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { getBaseClasses } from '../../../src/utils'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { BaseChatMemory } from 'langchain/memory'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
@ -93,19 +92,10 @@ class ConversationalAgent_Agents implements INode {
const memory = nodeData.inputs?.memory as BaseChatMemory
if (options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory
for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
memory.chatHistory = mapChatHistory(options)
executor.memory = memory
}
const result = await executor.call({ input })
return result?.output

View File

@ -0,0 +1,100 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { flatten } from 'lodash'
import { BaseChatMemory } from 'langchain/memory'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
/**
 * Flowise node: an OpenAI-functions agent tuned for retrieval during a
 * conversation. Wires connected tools, a chat model and (optionally) a
 * chat-memory node into a LangChain AgentExecutor.
 */
class ConversationalRetrievalAgent_Agents implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Conversational Retrieval Agent'
        this.name = 'conversationalRetrievalAgent'
        this.version = 1.0
        this.type = 'AgentExecutor'
        this.category = 'Agents'
        this.icon = 'agent.svg'
        this.description = `An agent optimized for retrieval during conversation, answering questions based on past dialogue, all using OpenAI's Function Calling`
        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
        this.inputs = [
            {
                label: 'Allowed Tools',
                name: 'tools',
                type: 'Tool',
                list: true
            },
            {
                label: 'Memory',
                name: 'memory',
                type: 'BaseChatMemory'
            },
            {
                label: 'OpenAI Chat Model',
                name: 'model',
                type: 'ChatOpenAI'
            },
            {
                label: 'System Message',
                name: 'systemMessage',
                type: 'string',
                rows: 4,
                optional: true,
                additionalParams: true
            }
        ]
    }

    /**
     * Build the agent executor from the connected nodes.
     * @param nodeData - node inputs (tools, memory, model, systemMessage)
     * @returns the initialized AgentExecutor with memory attached
     */
    async init(nodeData: INodeData): Promise<any> {
        const model = nodeData.inputs?.model
        const memory = nodeData.inputs?.memory as BaseChatMemory
        const systemMessage = nodeData.inputs?.systemMessage as string

        let tools = nodeData.inputs?.tools
        // tools may arrive as nested arrays from list inputs
        tools = flatten(tools)

        const executor = await initializeAgentExecutorWithOptions(tools, model, {
            agentType: 'openai-functions',
            // boolean expression directly; `=== 'true' ? true : false` was redundant
            verbose: process.env.DEBUG === 'true',
            agentArgs: {
                prefix: systemMessage ?? `You are a helpful AI assistant.`
            },
            returnIntermediateSteps: true
        })
        executor.memory = memory
        return executor
    }

    /**
     * Execute the agent for one user input, replaying prior chat history
     * into the attached memory so the agent sees the conversation so far.
     * @param nodeData - carries the AgentExecutor built in init()
     * @param input - the user's message
     * @param options - request options; may contain chatHistory, logger, socketIO
     * @returns the agent's final output string
     */
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
        const executor = nodeData.instance as AgentExecutor

        if (options && options.chatHistory) {
            if (executor.memory) {
                // casts to any: keys are not part of BaseChatMemory's typed surface
                ;(executor.memory as any).memoryKey = 'chat_history'
                ;(executor.memory as any).outputKey = 'output'
                ;(executor.memory as any).chatHistory = mapChatHistory(options)
            }
        }

        const loggerHandler = new ConsoleCallbackHandler(options.logger)

        if (options.socketIO && options.socketIOClientId) {
            // stream tokens back to the connected client
            const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
            const result = await executor.call({ input }, [loggerHandler, handler])
            return result?.output
        } else {
            const result = await executor.call({ input }, [loggerHandler])
            return result?.output
        }
    }
}

module.exports = { nodeClass: ConversationalRetrievalAgent_Agents }

View File

@ -0,0 +1,9 @@
<!-- Tabler "robot" icon (24x24 stroked outline) used as the node icon for the conversational retrieval agent -->
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-robot" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M7 7h10a2 2 0 0 1 2 2v1l1 1v3l-1 1v3a2 2 0 0 1 -2 2h-10a2 2 0 0 1 -2 -2v-3l-1 -1v-3l1 -1v-1a2 2 0 0 1 2 -2z"></path>
<path d="M10 16h4"></path>
<circle cx="8.5" cy="11.5" r=".5" fill="currentColor"></circle>
<circle cx="15.5" cy="11.5" r=".5" fill="currentColor"></circle>
<path d="M9 7l-1 -4"></path>
<path d="M15 7l1 -4"></path>
</svg>

After

Width:  |  Height:  |  Size: 650 B

View File

@ -1,10 +1,9 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { getBaseClasses } from '../../../src/utils'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { BaseChatMemory } from 'langchain/memory'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class OpenAIFunctionAgent_Agents implements INode {
@ -82,17 +81,7 @@ class OpenAIFunctionAgent_Agents implements INode {
const memory = nodeData.inputs?.memory as BaseChatMemory
if (options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory
for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
memory.chatHistory = mapChatHistory(options)
executor.memory = memory
}

View File

@ -1,10 +1,9 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConversationChain } from 'langchain/chains'
import { getBaseClasses } from '../../../src/utils'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts'
import { BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { BufferMemory } from 'langchain/memory'
import { BaseChatModel } from 'langchain/chat_models/base'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
import { flatten } from 'lodash'
import { Document } from 'langchain/document'
@ -106,17 +105,7 @@ class ConversationChain_Chains implements INode {
const memory = nodeData.inputs?.memory as BufferMemory
if (options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory
for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
memory.chatHistory = mapChatHistory(options)
chain.memory = memory
}

View File

@ -1,10 +1,9 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { ConversationalRetrievalQAChain, QAChainParams } from 'langchain/chains'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { BaseRetriever } from 'langchain/schema/retriever'
import { BaseChatMemory, BufferMemory, ChatMessageHistory, BufferMemoryInput } from 'langchain/memory'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { PromptTemplate } from 'langchain/prompts'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
import {
@ -105,7 +104,7 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean
const chainOption = nodeData.inputs?.chainOption as string
const memory = nodeData.inputs?.memory
const externalMemory = nodeData.inputs?.memory
const obj: any = {
verbose: process.env.DEBUG === 'true' ? true : false,
@ -113,7 +112,9 @@ class ConversationalRetrievalQAChain_Chains implements INode {
template: CUSTOM_QUESTION_GENERATOR_CHAIN_PROMPT
}
}
if (returnSourceDocuments) obj.returnSourceDocuments = returnSourceDocuments
if (chainOption === 'map_reduce') {
obj.qaChainOptions = {
type: 'map_reduce',
@ -142,20 +143,21 @@ class ConversationalRetrievalQAChain_Chains implements INode {
} as QAChainParams
}
if (memory) {
memory.inputKey = 'question'
memory.memoryKey = 'chat_history'
if (chainOption === 'refine') memory.outputKey = 'output_text'
else memory.outputKey = 'text'
obj.memory = memory
if (externalMemory) {
externalMemory.memoryKey = 'chat_history'
externalMemory.inputKey = 'question'
externalMemory.outputKey = 'text'
externalMemory.returnMessages = true
if (chainOption === 'refine') externalMemory.outputKey = 'output_text'
obj.memory = externalMemory
} else {
const fields: BufferMemoryInput = {
memoryKey: 'chat_history',
inputKey: 'question',
outputKey: 'text',
returnMessages: true
}
if (chainOption === 'refine') fields.outputKey = 'output_text'
else fields.outputKey = 'text'
obj.memory = new BufferMemory(fields)
}
@ -166,7 +168,6 @@ class ConversationalRetrievalQAChain_Chains implements INode {
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
const chain = nodeData.instance as ConversationalRetrievalQAChain
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean
const memory = nodeData.inputs?.memory
const chainOption = nodeData.inputs?.chainOption as string
let model = nodeData.inputs?.model
@ -177,21 +178,8 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const obj = { question: input }
// If external memory like Zep, Redis is being used, ignore below
if (!memory && chain.memory && options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory
const memory = chain.memory as BaseChatMemory
for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
chain.memory = memory
if (options && options.chatHistory && chain.memory) {
;(chain.memory as any).chatHistory = mapChatHistory(options)
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)

View File

@ -1,6 +1,6 @@
import { ICommonObject, INode, INodeData, INodeParams, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src'
import { DynamoDBChatMessageHistory } from 'langchain/stores/message/dynamodb'
import { BufferMemory } from 'langchain/memory'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
class DynamoDb_Memory implements INode {
label: string
@ -51,7 +51,7 @@ class DynamoDb_Memory implements INode {
label: 'Session ID',
name: 'sessionId',
type: 'string',
description: 'if empty, chatId will be used automatically',
description: 'If not specified, the first CHAT_MESSAGE_ID will be used as sessionId',
default: '',
additionalParams: true,
optional: true
@ -86,9 +86,11 @@ const initalizeDynamoDB = async (nodeData: INodeData, options: ICommonObject): P
const sessionId = nodeData.inputs?.sessionId as string
const region = nodeData.inputs?.region as string
const memoryKey = nodeData.inputs?.memoryKey as string
const chatId = options.chatId
let isSessionIdUsingChatMessageId = false
if (!sessionId && chatId) isSessionIdUsingChatMessageId = true
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const accessKeyId = getCredentialParam('accessKey', credentialData, nodeData)
const secretAccessKey = getCredentialParam('secretAccessKey', credentialData, nodeData)
@ -106,12 +108,26 @@ const initalizeDynamoDB = async (nodeData: INodeData, options: ICommonObject): P
}
})
const memory = new BufferMemory({
const memory = new BufferMemoryExtended({
memoryKey,
chatHistory: dynamoDb,
returnMessages: true
returnMessages: true,
isSessionIdUsingChatMessageId
})
return memory
}
interface BufferMemoryExtendedInput {
    isSessionIdUsingChatMessageId: boolean
}

/** BufferMemory that remembers whether the DynamoDB sessionId fell back to the chatId. */
class BufferMemoryExtended extends BufferMemory {
    // true when no explicit sessionId was configured and the chatId is used instead
    isSessionIdUsingChatMessageId? = false

    constructor(fields: BufferMemoryInput & Partial<BufferMemoryExtendedInput>) {
        super(fields)
        // `?? false` preserves the declared default when the flag is omitted;
        // plain assignment would overwrite it with undefined
        this.isSessionIdUsingChatMessageId = fields.isSessionIdUsingChatMessageId ?? false
    }
}
module.exports = { nodeClass: DynamoDb_Memory }

View File

@ -2,6 +2,7 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ICommonObject } from '../../../src'
import { MotorheadMemory, MotorheadMemoryInput } from 'langchain/memory'
import fetch from 'node-fetch'
class MotorMemory_Memory implements INode {
label: string
@ -44,7 +45,7 @@ class MotorMemory_Memory implements INode {
label: 'Session Id',
name: 'sessionId',
type: 'string',
description: 'if empty, chatId will be used automatically',
description: 'If not specified, the first CHAT_MESSAGE_ID will be used as sessionId',
default: '',
additionalParams: true,
optional: true
@ -77,14 +78,16 @@ const initalizeMotorhead = async (nodeData: INodeData, options: ICommonObject):
const memoryKey = nodeData.inputs?.memoryKey as string
const baseURL = nodeData.inputs?.baseURL as string
const sessionId = nodeData.inputs?.sessionId as string
const chatId = options?.chatId as string
let isSessionIdUsingChatMessageId = false
if (!sessionId && chatId) isSessionIdUsingChatMessageId = true
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const clientId = getCredentialParam('clientId', credentialData, nodeData)
let obj: MotorheadMemoryInput = {
let obj: MotorheadMemoryInput & Partial<MotorheadMemoryExtendedInput> = {
returnMessages: true,
sessionId: sessionId ? sessionId : chatId,
memoryKey
@ -103,7 +106,44 @@ const initalizeMotorhead = async (nodeData: INodeData, options: ICommonObject):
}
}
return new MotorheadMemory(obj)
if (isSessionIdUsingChatMessageId) obj.isSessionIdUsingChatMessageId = true
const motorheadMemory = new MotorheadMemoryExtended(obj)
// Get messages from sessionId
await motorheadMemory.init()
return motorheadMemory
}
interface MotorheadMemoryExtendedInput {
    isSessionIdUsingChatMessageId: boolean
}

/** MotorheadMemory that remembers whether the sessionId fell back to the chatId and supports remote deletion. */
class MotorheadMemoryExtended extends MotorheadMemory {
    // true when no explicit sessionId was configured and the chatId is used instead
    isSessionIdUsingChatMessageId? = false

    constructor(fields: MotorheadMemoryInput & Partial<MotorheadMemoryExtendedInput>) {
        super(fields)
        // `?? false` preserves the declared default when the flag is omitted;
        // plain assignment would overwrite it with undefined
        this.isSessionIdUsingChatMessageId = fields.isSessionIdUsingChatMessageId ?? false
    }

    /**
     * Delete the session's memory on the Motorhead server, then clear local history.
     * Server-side deletion failures are logged but not rethrown (best-effort).
     */
    async clear(): Promise<void> {
        try {
            await this.caller.call(fetch, `${this.url}/sessions/${this.sessionId}/memory`, {
                //@ts-ignore
                signal: this.timeout ? AbortSignal.timeout(this.timeout) : undefined,
                headers: this._getHeaders() as ICommonObject,
                method: 'DELETE'
            })
        } catch (error) {
            console.error('Error deleting session: ', error)
        }

        // Clear the superclass's chat history
        // NOTE(review): super.clear() likely clears chatHistory as well — the explicit
        // chatHistory.clear() may be redundant but is kept for safety; confirm upstream.
        await this.chatHistory.clear()
        await super.clear()
    }
}
module.exports = { nodeClass: MotorMemory_Memory }

View File

@ -1,7 +1,7 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ICommonObject } from '../../../src'
import { BufferMemory } from 'langchain/memory'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { RedisChatMessageHistory, RedisChatMessageHistoryInput } from 'langchain/stores/message/redis'
import { createClient } from 'redis'
@ -36,7 +36,7 @@ class RedisBackedChatMemory_Memory implements INode {
label: 'Session Id',
name: 'sessionId',
type: 'string',
description: 'if empty, chatId will be used automatically',
description: 'If not specified, the first CHAT_MESSAGE_ID will be used as sessionId',
default: '',
additionalParams: true,
optional: true
@ -78,9 +78,11 @@ const initalizeRedis = (nodeData: INodeData, options: ICommonObject): BufferMemo
const sessionId = nodeData.inputs?.sessionId as string
const sessionTTL = nodeData.inputs?.sessionTTL as number
const memoryKey = nodeData.inputs?.memoryKey as string
const chatId = options?.chatId as string
let isSessionIdUsingChatMessageId = false
if (!sessionId && chatId) isSessionIdUsingChatMessageId = true
const redisClient = createClient({ url: baseURL })
let obj: RedisChatMessageHistoryInput = {
sessionId: sessionId ? sessionId : chatId,
@ -94,10 +96,28 @@ const initalizeRedis = (nodeData: INodeData, options: ICommonObject): BufferMemo
}
}
let redisChatMessageHistory = new RedisChatMessageHistory(obj)
let redis = new BufferMemory({ memoryKey, chatHistory: redisChatMessageHistory, returnMessages: true })
const redisChatMessageHistory = new RedisChatMessageHistory(obj)
return redis
const memory = new BufferMemoryExtended({
memoryKey,
chatHistory: redisChatMessageHistory,
returnMessages: true,
isSessionIdUsingChatMessageId
})
return memory
}
interface BufferMemoryExtendedInput {
    isSessionIdUsingChatMessageId: boolean
}

/** BufferMemory that remembers whether the Redis sessionId fell back to the chatId. */
class BufferMemoryExtended extends BufferMemory {
    // true when no explicit sessionId was configured and the chatId is used instead
    isSessionIdUsingChatMessageId? = false

    constructor(fields: BufferMemoryInput & Partial<BufferMemoryExtendedInput>) {
        super(fields)
        // `?? false` preserves the declared default when the flag is omitted;
        // plain assignment would overwrite it with undefined
        this.isSessionIdUsingChatMessageId = fields.isSessionIdUsingChatMessageId ?? false
    }
}
module.exports = { nodeClass: RedisBackedChatMemory_Memory }

View File

@ -50,7 +50,7 @@ class ZepMemory_Memory implements INode {
label: 'Session Id',
name: 'sessionId',
type: 'string',
description: 'if empty, chatId will be used automatically',
description: 'If not specified, the first CHAT_MESSAGE_ID will be used as sessionId',
default: '',
additionalParams: true,
optional: true
@ -156,13 +156,15 @@ const initalizeZep = async (nodeData: INodeData, options: ICommonObject): Promis
const memoryKey = nodeData.inputs?.memoryKey as string
const inputKey = nodeData.inputs?.inputKey as string
const sessionId = nodeData.inputs?.sessionId as string
const chatId = options?.chatId as string
let isSessionIdUsingChatMessageId = false
if (!sessionId && chatId) isSessionIdUsingChatMessageId = true
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const obj: ZepMemoryInput = {
const obj: ZepMemoryInput & Partial<ZepMemoryExtendedInput> = {
baseURL,
sessionId: sessionId ? sessionId : chatId,
aiPrefix,
@ -172,8 +174,39 @@ const initalizeZep = async (nodeData: INodeData, options: ICommonObject): Promis
inputKey
}
if (apiKey) obj.apiKey = apiKey
if (isSessionIdUsingChatMessageId) obj.isSessionIdUsingChatMessageId = true
return new ZepMemory(obj)
return new ZepMemoryExtended(obj)
}
interface ZepMemoryExtendedInput {
    isSessionIdUsingChatMessageId: boolean
}

/** ZepMemory that remembers whether the sessionId fell back to the chatId and clears accordingly. */
class ZepMemoryExtended extends ZepMemory {
    // true when no explicit sessionId was configured and the chatId is used instead
    isSessionIdUsingChatMessageId? = false

    constructor(fields: ZepMemoryInput & Partial<ZepMemoryExtendedInput>) {
        super(fields)
        // `?? false` preserves the declared default when the flag is omitted;
        // plain assignment would overwrite it with undefined
        this.isSessionIdUsingChatMessageId = fields.isSessionIdUsingChatMessageId ?? false
    }

    /**
     * Clear chat history; remote Zep memory is only deleted when the session
     * is keyed by chatId.
     */
    async clear(): Promise<void> {
        // Only clear when sessionId is using chatId
        // If sessionId is specified, clearing and inserting again will error because the sessionId has been soft deleted
        // If using chatId, it will not be a problem because the sessionId will always be the new chatId
        if (this.isSessionIdUsingChatMessageId) {
            try {
                await this.zepClient.deleteMemory(this.sessionId)
            } catch (error) {
                // best-effort remote deletion; keep going so local history is still cleared
                console.error('Error deleting session: ', error)
            }
            // Clear the superclass's chat history
            await super.clear()
        }
        await this.chatHistory.clear()
    }
}
module.exports = { nodeClass: ZepMemory_Memory }

View File

@ -0,0 +1,65 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { DynamicTool } from 'langchain/tools'
import { createRetrieverTool } from 'langchain/agents/toolkits'
import { BaseRetriever } from 'langchain/schema/retriever'
/**
 * Flowise node: wraps a BaseRetriever as an agent tool via LangChain's
 * createRetrieverTool, so agents can decide when to query the retriever.
 */
class Retriever_Tools implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'Retriever Tool'
        this.name = 'retrieverTool'
        this.version = 1.0
        this.type = 'RetrieverTool'
        this.icon = 'retriever-tool.png'
        this.category = 'Tools'
        // grammar fixed in user-facing descriptions below
        this.description = 'Use a retriever as an allowed tool for the agent'
        this.baseClasses = [this.type, 'DynamicTool', ...getBaseClasses(DynamicTool)]
        this.inputs = [
            {
                label: 'Retriever Name',
                name: 'name',
                type: 'string',
                placeholder: 'search_state_of_union'
            },
            {
                label: 'Retriever Description',
                name: 'description',
                type: 'string',
                description: 'When should the agent use this tool to retrieve documents',
                rows: 3,
                placeholder: 'Searches and returns documents regarding the state-of-the-union.'
            },
            {
                label: 'Retriever',
                name: 'retriever',
                type: 'BaseRetriever'
            }
        ]
    }

    /**
     * Create the retriever tool from the configured name, description and retriever.
     * @param nodeData - node inputs (name, description, retriever)
     * @returns the tool instance usable by agents
     */
    async init(nodeData: INodeData): Promise<any> {
        const name = nodeData.inputs?.name as string
        const description = nodeData.inputs?.description as string
        const retriever = nodeData.inputs?.retriever as BaseRetriever

        const tool = createRetrieverTool(retriever, {
            name,
            description
        })
        return tool
    }
}

module.exports = { nodeClass: Retriever_Tools }

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

View File

@ -38,7 +38,7 @@
"form-data": "^4.0.0",
"graphql": "^16.6.0",
"html-to-text": "^9.0.5",
"langchain": "^0.0.117",
"langchain": "^0.0.122",
"linkifyjs": "^4.1.1",
"mammoth": "^1.5.1",
"moment": "^2.29.3",

View File

@ -4,8 +4,10 @@ import * as fs from 'fs'
import * as path from 'path'
import { JSDOM } from 'jsdom'
import { DataSource } from 'typeorm'
import { ICommonObject, IDatabaseEntity, INodeData } from './Interface'
import { ICommonObject, IDatabaseEntity, IMessage, INodeData } from './Interface'
import { AES, enc } from 'crypto-js'
import { ChatMessageHistory } from 'langchain/memory'
import { AIMessage, HumanMessage } from 'langchain/schema'
export const numberOrExpressionRegex = '^(\\d+\\.?\\d*|{{.*}})$' //return true if string consists only numbers OR expression {{}}
export const notEmptyRegex = '(.|\\s)*\\S(.|\\s)*' //return true if string is not empty or blank
@ -204,6 +206,9 @@ export const getAvailableURLs = async (url: string, limit: number) => {
/**
* Search for href through htmlBody string
* @param {string} htmlBody
* @param {string} baseURL
* @returns {string[]}
*/
function getURLsFromHTML(htmlBody: string, baseURL: string): string[] {
const dom = new JSDOM(htmlBody)
@ -233,6 +238,8 @@ function getURLsFromHTML(htmlBody: string, baseURL: string): string[] {
/**
* Normalize URL to prevent crawling the same page
* @param {string} urlString
* @returns {string}
*/
function normalizeURL(urlString: string): string {
const urlObj = new URL(urlString)
@ -246,6 +253,11 @@ function normalizeURL(urlString: string): string {
/**
* Recursive crawl using normalizeURL and getURLsFromHTML
* @param {string} baseURL
* @param {string} currentURL
* @param {string[]} pages
* @param {number} limit
* @returns {Promise<string[]>}
*/
async function crawl(baseURL: string, currentURL: string, pages: string[], limit: number): Promise<string[]> {
const baseURLObj = new URL(baseURL)
@ -290,6 +302,9 @@ async function crawl(baseURL: string, currentURL: string, pages: string[], limit
/**
 * Prep URL before passing into the recursive crawl function
* @param {string} stringURL
* @param {number} limit
* @returns {Promise<string[]>}
*/
export async function webCrawl(stringURL: string, limit: number): Promise<string[]> {
const URLObj = new URL(stringURL)
@ -336,10 +351,10 @@ export async function xmlScrape(currentURL: string, limit: number): Promise<stri
return urls
}
/*
/**
* Get env variables
* @param {string} url
* @returns {string[]}
* @param {string} name
* @returns {string | undefined}
*/
export const getEnvironmentVariable = (name: string): string | undefined => {
try {
@ -470,6 +485,10 @@ export function handleEscapeCharacters(input: any, reverse: Boolean): any {
return input
}
/**
* Get user home dir
* @returns {string}
*/
export const getUserHome = (): string => {
let variableName = 'HOME'
if (process.platform === 'win32') {
@ -482,3 +501,22 @@ export const getUserHome = (): string => {
}
return process.env[variableName] as string
}
/**
 * Map incoming chat history (UI/API message format) to a LangChain ChatMessageHistory
 * @param {ICommonObject} options - request options; reads options.chatHistory (IMessage[])
 * @returns {ChatMessageHistory}
 */
export const mapChatHistory = (options: ICommonObject): ChatMessageHistory => {
    const chatHistory = []
    // guard against a missing chatHistory so callers need not pre-check
    const histories: IMessage[] = options.chatHistory ?? []
    for (const message of histories) {
        if (message.type === 'apiMessage') {
            chatHistory.push(new AIMessage(message.message))
        } else if (message.type === 'userMessage') {
            chatHistory.push(new HumanMessage(message.message))
        }
        // messages of any other type are intentionally skipped
    }
    return new ChatMessageHistory(chatHistory)
}

View File

@ -0,0 +1,607 @@
{
"description": "Agent optimized for vector retrieval during conversation and answering questions based on previous dialogue.",
"nodes": [
{
"width": 300,
"height": 523,
"id": "chatOpenAI_0",
"position": {
"x": 1381.867549919116,
"y": 212.76900895393834
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"version": 1,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": ["openAIApi"],
"id": "chatOpenAI_0-input-credential-credential"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-4-32k",
"name": "gpt-4-32k"
},
{
"label": "gpt-4-32k-0613",
"name": "gpt-4-32k-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
},
{
"label": "gpt-3.5-turbo-16k",
"name": "gpt-3.5-turbo-16k"
},
{
"label": "gpt-3.5-turbo-16k-0613",
"name": "gpt-3.5-turbo-16k-0613"
}
],
"default": "gpt-3.5-turbo",
"optional": true,
"id": "chatOpenAI_0-input-modelName-options"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true,
"id": "chatOpenAI_0-input-temperature-number"
},
{
"label": "Max Tokens",
"name": "maxTokens",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-maxTokens-number"
},
{
"label": "Top Probability",
"name": "topP",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-topP-number"
},
{
"label": "Frequency Penalty",
"name": "frequencyPenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-frequencyPenalty-number"
},
{
"label": "Presence Penalty",
"name": "presencePenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-presencePenalty-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-basepath-string"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo-16k",
"temperature": "0",
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
"presencePenalty": "",
"timeout": "",
"basepath": ""
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1381.867549919116,
"y": 212.76900895393834
},
"dragging": false
},
{
"width": 300,
"height": 329,
"id": "openAIEmbeddings_0",
"position": {
"x": 954.0674802999345,
"y": -196.7034956445692
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"version": 1,
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["OpenAIEmbeddings", "Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": ["openAIApi"],
"id": "openAIEmbeddings_0-input-credential-credential"
},
{
"label": "Strip New Lines",
"name": "stripNewLines",
"type": "boolean",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-stripNewLines-boolean"
},
{
"label": "Batch Size",
"name": "batchSize",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-batchSize-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-basepath-string"
}
],
"inputAnchors": [],
"inputs": {
"stripNewLines": "",
"batchSize": "",
"timeout": "",
"basepath": ""
},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "OpenAIEmbeddings | Embeddings"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 954.0674802999345,
"y": -196.7034956445692
},
"dragging": false
},
{
"width": 300,
"height": 505,
"id": "pineconeExistingIndex_0",
"position": {
"x": 1362.0018461011314,
"y": -334.0373537488481
},
"type": "customNode",
"data": {
"id": "pineconeExistingIndex_0",
"label": "Pinecone Load Existing Index",
"version": 1,
"name": "pineconeExistingIndex",
"type": "Pinecone",
"baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"],
"category": "Vector Stores",
"description": "Load existing index from Pinecone (i.e: Document has been upserted)",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": ["pineconeApi"],
"id": "pineconeExistingIndex_0-input-credential-credential"
},
{
"label": "Pinecone Index",
"name": "pineconeIndex",
"type": "string",
"id": "pineconeExistingIndex_0-input-pineconeIndex-string"
},
{
"label": "Pinecone Namespace",
"name": "pineconeNamespace",
"type": "string",
"placeholder": "my-first-namespace",
"additionalParams": true,
"optional": true,
"id": "pineconeExistingIndex_0-input-pineconeNamespace-string"
},
{
"label": "Pinecone Metadata Filter",
"name": "pineconeMetadataFilter",
"type": "json",
"optional": true,
"additionalParams": true,
"id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json"
},
{
"label": "Top K",
"name": "topK",
"description": "Number of top results to fetch. Default to 4",
"placeholder": "4",
"type": "number",
"additionalParams": true,
"optional": true,
"id": "pineconeExistingIndex_0-input-topK-number"
}
],
"inputAnchors": [
{
"label": "Embeddings",
"name": "embeddings",
"type": "Embeddings",
"id": "pineconeExistingIndex_0-input-embeddings-Embeddings"
}
],
"inputs": {
"embeddings": "{{openAIEmbeddings_0.data.instance}}",
"pineconeIndex": "newindex",
"pineconeNamespace": "",
"pineconeMetadataFilter": "",
"topK": ""
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever",
"name": "retriever",
"label": "Pinecone Retriever",
"type": "Pinecone | VectorStoreRetriever | BaseRetriever"
},
{
"id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore",
"name": "vectorStore",
"label": "Pinecone Vector Store",
"type": "Pinecone | VectorStore"
}
],
"default": "retriever"
}
],
"outputs": {
"output": "retriever"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1362.0018461011314,
"y": -334.0373537488481
},
"dragging": false
},
{
"width": 300,
"height": 383,
"id": "conversationalRetrievalAgent_0",
"position": {
"x": 2345.912948267881,
"y": 357.97363342258217
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalAgent_0",
"label": "Conversational Retrieval Agent",
"version": 1,
"name": "conversationalRetrievalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "Runnable"],
"category": "Agents",
"description": "An agent optimized for retrieval during conversation, answering questions based on past dialogue, all using OpenAI's Function Calling",
"inputParams": [
{
"label": "System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"optional": true,
"additionalParams": true,
"id": "conversationalRetrievalAgent_0-input-systemMessage-string"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalRetrievalAgent_0-input-tools-Tool"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalRetrievalAgent_0-input-memory-BaseChatMemory"
},
{
"label": "OpenAI Chat Model",
"name": "model",
"type": "ChatOpenAI",
"id": "conversationalRetrievalAgent_0-input-model-ChatOpenAI"
}
],
"inputs": {
"tools": ["{{retrieverTool_0.data.instance}}"],
"memory": "{{bufferMemory_0.data.instance}}",
"model": "{{chatOpenAI_0.data.instance}}",
"systemMessage": ""
},
"outputAnchors": [
{
"id": "conversationalRetrievalAgent_0-output-conversationalRetrievalAgent-AgentExecutor|BaseChain|Runnable",
"name": "conversationalRetrievalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | Runnable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 2345.912948267881,
"y": 357.97363342258217
},
"dragging": false
},
{
"width": 300,
"height": 505,
"id": "retrieverTool_0",
"position": {
"x": 1770.1485365275375,
"y": -321.14473253480946
},
"type": "customNode",
"data": {
"id": "retrieverTool_0",
"label": "Retriever Tool",
"version": 1,
"name": "retrieverTool",
"type": "RetrieverTool",
"baseClasses": ["RetrieverTool", "DynamicTool", "Tool", "StructuredTool", "Runnable"],
"category": "Tools",
"description": "Use a retriever as allowed tool for agent",
"inputParams": [
{
"label": "Retriever Name",
"name": "name",
"type": "string",
"placeholder": "search_state_of_union",
"id": "retrieverTool_0-input-name-string"
},
{
"label": "Retriever Description",
"name": "description",
"type": "string",
"description": "When should agent uses to retrieve documents",
"rows": 3,
"placeholder": "Searches and returns documents regarding the state-of-the-union.",
"id": "retrieverTool_0-input-description-string"
}
],
"inputAnchors": [
{
"label": "Retriever",
"name": "retriever",
"type": "BaseRetriever",
"id": "retrieverTool_0-input-retriever-BaseRetriever"
}
],
"inputs": {
"name": "search_website",
"description": "Searches and return documents regarding Jane - a culinary institution that offers top quality coffee, pastries, breakfast, lunch, and a variety of baked goods. They have multiple locations, including Jane on Fillmore, Jane on Larkin, Jane the Bakery, Toy Boat By Jane, and Little Jane on Grant. They emphasize healthy eating with a focus on flavor and quality ingredients. They bake everything in-house and work with local suppliers to source ingredients directly from farmers. They also offer catering services and delivery options.",
"retriever": "{{pineconeExistingIndex_0.data.instance}}"
},
"outputAnchors": [
{
"id": "retrieverTool_0-output-retrieverTool-RetrieverTool|DynamicTool|Tool|StructuredTool|Runnable",
"name": "retrieverTool",
"label": "RetrieverTool",
"type": "RetrieverTool | DynamicTool | Tool | StructuredTool | Runnable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 1770.1485365275375,
"y": -321.14473253480946
}
},
{
"width": 300,
"height": 376,
"id": "bufferMemory_0",
"position": {
"x": 1771.396291209036,
"y": 216.94151328212496
},
"type": "customNode",
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"inputParams": [
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
},
"outputAnchors": [
{
"id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"name": "bufferMemory",
"label": "BufferMemory",
"type": "BufferMemory | BaseChatMemory | BaseMemory"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1771.396291209036,
"y": 216.94151328212496
},
"dragging": false
}
],
"edges": [
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"target": "pineconeExistingIndex_0",
"targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
},
{
"source": "pineconeExistingIndex_0",
"sourceHandle": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever",
"target": "retrieverTool_0",
"targetHandle": "retrieverTool_0-input-retriever-BaseRetriever",
"type": "buttonedge",
"id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-retrieverTool_0-retrieverTool_0-input-retriever-BaseRetriever",
"data": {
"label": ""
}
},
{
"source": "retrieverTool_0",
"sourceHandle": "retrieverTool_0-output-retrieverTool-RetrieverTool|DynamicTool|Tool|StructuredTool|Runnable",
"target": "conversationalRetrievalAgent_0",
"targetHandle": "conversationalRetrievalAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "retrieverTool_0-retrieverTool_0-output-retrieverTool-RetrieverTool|DynamicTool|Tool|StructuredTool|Runnable-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "conversationalRetrievalAgent_0",
"targetHandle": "conversationalRetrievalAgent_0-input-model-ChatOpenAI",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-model-ChatOpenAI",
"data": {
"label": ""
}
},
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"target": "conversationalRetrievalAgent_0",
"targetHandle": "conversationalRetrievalAgent_0-input-memory-BaseChatMemory",
"type": "buttonedge",
"id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-memory-BaseChatMemory",
"data": {
"label": ""
}
}
]
}

View File

@ -2,6 +2,7 @@ import path from 'path'
import { IChildProcessMessage, IReactFlowNode, IReactFlowObject, IRunChatflowMessageValue, INodeData } from './Interface'
import {
buildLangchain,
checkMemorySessionId,
constructGraphs,
getEndingNode,
getStartingNodes,
@ -137,7 +138,10 @@ export class ChildProcess {
const nodeInstance = new nodeModule.nodeClass()
logger.debug(`[server] [mode:child]: Running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
const result = await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history })
if (nodeToExecuteData.instance) checkMemorySessionId(nodeToExecuteData.instance, chatId)
const result = await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history, chatId })
logger.debug(`[server] [mode:child]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
await sendToParentProcess('finish', { result, addToChatFlowPool })

View File

@ -45,7 +45,8 @@ import {
decryptCredentialData,
clearSessionMemory,
replaceInputsWithConfig,
getEncryptionKey
getEncryptionKey,
checkMemorySessionId
} from './utils'
import { cloneDeep, omit } from 'lodash'
import { getDataSource } from './DataSource'
@ -661,7 +662,11 @@ export class App {
templates.push(template)
})
const FlowiseDocsQnA = templates.find((tmp) => tmp.name === 'Flowise Docs QnA')
if (FlowiseDocsQnA) templates.unshift(FlowiseDocsQnA)
const FlowiseDocsQnAIndex = templates.findIndex((tmp) => tmp.name === 'Flowise Docs QnA')
if (FlowiseDocsQnA && FlowiseDocsQnAIndex > 0) {
templates.splice(FlowiseDocsQnAIndex, 1)
templates.unshift(FlowiseDocsQnA)
}
return res.json(templates)
})
@ -988,14 +993,24 @@ export class App {
isStreamValid = isStreamValid && !isVectorStoreFaiss(nodeToExecuteData)
logger.debug(`[server]: Running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
if (nodeToExecuteData.instance) checkMemorySessionId(nodeToExecuteData.instance, chatId)
const result = isStreamValid
? await nodeInstance.run(nodeToExecuteData, incomingInput.question, {
chatHistory: incomingInput.history,
socketIO,
socketIOClientId: incomingInput.socketIOClientId,
logger
logger,
appDataSource: this.AppDataSource,
databaseEntities
})
: await nodeInstance.run(nodeToExecuteData, incomingInput.question, {
chatHistory: incomingInput.history,
logger,
appDataSource: this.AppDataSource,
databaseEntities
})
: await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history, logger })
logger.debug(`[server]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
return res.json(result)

View File

@ -787,7 +787,7 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod
isValidChainOrAgent = !blacklistChains.includes(endingNodeData.name)
} else if (endingNodeData.category === 'Agents') {
// Agent that are available to stream
const whitelistAgents = ['openAIFunctionAgent', 'csvAgent', 'airtableAgent']
const whitelistAgents = ['openAIFunctionAgent', 'csvAgent', 'airtableAgent', 'conversationalRetrievalAgent']
isValidChainOrAgent = whitelistAgents.includes(endingNodeData.name)
}
@ -908,3 +908,15 @@ export const redactCredentialWithPasswordType = (
}
return plainDataObj
}
/**
 * Re-point a node instance's memory session at the current chat.
 *
 * After chat history is cleared a new chatId is issued; when the memory was
 * keyed by the chat message id (`isSessionIdUsingChatMessageId`), its
 * `sessionId` must follow the new chatId so subsequent turns read and write
 * the correct history.
 *
 * No-op when the instance is null/undefined, has no memory, the memory is not
 * keyed by chat message id, or chatId is empty — so callers no longer need to
 * guard `if (instance)` themselves.
 *
 * @param instance node instance that may carry a chat memory at `instance.memory`
 * @param chatId current chat id; ignored when empty
 */
export const checkMemorySessionId = (instance: any, chatId: string) => {
    // Optional chaining keeps this safe for null instances and instances
    // without memory, matching the callers' best-effort intent.
    if (chatId && instance?.memory?.isSessionIdUsingChatMessageId) {
        instance.memory.sessionId = chatId
    }
}