Feature/Buffer Memory SessionId (#2111)

* add sessionId to buffer memory, add conversation summary buffer memory

* add fix for conv retrieval qa chain
This commit is contained in:
Henry Heng 2024-04-11 11:18:39 +01:00 committed by GitHub
parent 57b716c7d7
commit c33642cdf9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
39 changed files with 784 additions and 574 deletions

View File

@ -1,31 +0,0 @@
import { INodeParams, INodeCredential } from '../src/Interface'
/**
 * Credential definition for the Motorhead Memory integration.
 *
 * Declares the two secrets the node needs — a Client ID and an API Key —
 * so the host application can render a credential form and store the values.
 * The class only describes metadata; it performs no I/O itself.
 */
class MotorheadMemoryApi implements INodeCredential {
    label: string
    name: string
    version: number
    description: string
    inputs: INodeParams[]

    constructor() {
        // Display name shown in the credential picker UI
        this.label = 'Motorhead Memory API'
        // Internal identifier used to look the credential up
        this.name = 'motorheadMemoryApi'
        this.version = 1.0
        this.description =
            'Refer to <a target="_blank" href="https://docs.getmetal.io/misc-get-keys">official guide</a> on how to create API key and Client ID on Motorhead Memory'

        // Plain-text field for the Motorhead client identifier
        const clientIdField: INodeParams = {
            label: 'Client ID',
            name: 'clientId',
            type: 'string'
        }
        // Masked field for the secret API key
        const apiKeyField: INodeParams = {
            label: 'API Key',
            name: 'apiKey',
            type: 'password'
        }
        this.inputs = [clientIdField, apiKeyField]
    }
}

module.exports = { credClass: MotorheadMemoryApi }

View File

@ -9,7 +9,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
import { ChatConversationalAgent } from 'langchain/agents'
import { getBaseClasses } from '../../../src/utils'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { IVisionChatModal, FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
import { IVisionChatModal, FlowiseMemory, ICommonObject, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
import { AgentExecutor } from '../../../src/agents'
import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils'
import { checkInputs, Moderation } from '../../moderation/Moderation'
@ -92,7 +92,7 @@ class ConversationalAgent_Agents implements INode {
}
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | object> {
@ -109,12 +109,7 @@ class ConversationalAgent_Agents implements INode {
return formatResponse(e.message)
}
}
const executor = await prepareAgent(
nodeData,
options,
{ sessionId: this.sessionId, chatId: options.chatId, input },
options.chatHistory
)
const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options)
@ -178,8 +173,7 @@ class ConversationalAgent_Agents implements INode {
const prepareAgent = async (
nodeData: INodeData,
options: ICommonObject,
flowObj: { sessionId?: string; chatId?: string; input?: string },
chatHistory: IMessage[] = []
flowObj: { sessionId?: string; chatId?: string; input?: string }
) => {
const model = nodeData.inputs?.model as BaseChatModel
let tools = nodeData.inputs?.tools as Tool[]
@ -238,7 +232,7 @@ const prepareAgent = async (
[inputKey]: (i: { input: string; steps: AgentStep[] }) => i.input,
agent_scratchpad: async (i: { input: string; steps: AgentStep[] }) => await constructScratchPad(i.steps),
[memoryKey]: async (_: { input: string; steps: AgentStep[] }) => {
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, chatHistory)) as BaseMessage[]
const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[]
return messages ?? []
}
},

View File

@ -6,7 +6,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
import { ChatOpenAI, formatToOpenAIFunction } from '@langchain/openai'
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { FlowiseMemory, ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
@ -77,7 +77,7 @@ class ConversationalRetrievalAgent_Agents implements INode {
}
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | object> {
@ -95,7 +95,7 @@ class ConversationalRetrievalAgent_Agents implements INode {
}
}
const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options)
@ -127,11 +127,7 @@ class ConversationalRetrievalAgent_Agents implements INode {
}
}
const prepareAgent = (
nodeData: INodeData,
flowObj: { sessionId?: string; chatId?: string; input?: string },
chatHistory: IMessage[] = []
) => {
const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
const model = nodeData.inputs?.model as ChatOpenAI
const memory = nodeData.inputs?.memory as FlowiseMemory
const systemMessage = nodeData.inputs?.systemMessage as string
@ -156,7 +152,7 @@ const prepareAgent = (
[inputKey]: (i: { input: string; steps: AgentStep[] }) => i.input,
agent_scratchpad: (i: { input: string; steps: AgentStep[] }) => formatAgentSteps(i.steps),
[memoryKey]: async (_: { input: string; steps: AgentStep[] }) => {
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, chatHistory)) as BaseMessage[]
const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[]
return messages ?? []
}
},

View File

@ -125,8 +125,7 @@ class MRKLAgentChat_Agents implements INode {
const callbacks = await additionalCallbacks(nodeData, options)
const prevChatHistory = options.chatHistory
const chatHistory = ((await memory.getChatMessages(this.sessionId, false, prevChatHistory)) as IMessage[]) ?? []
const chatHistory = ((await memory.getChatMessages(this.sessionId, false)) as IMessage[]) ?? []
const chatHistoryString = chatHistory.map((hist) => hist.message).join('\\n')
const result = await executor.invoke({ input, chat_history: chatHistoryString }, { callbacks })

View File

@ -8,7 +8,7 @@ import { convertToOpenAITool } from '@langchain/core/utils/function_calling'
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
import { OpenAIToolsAgentOutputParser } from 'langchain/agents/openai/output_parser'
import { getBaseClasses } from '../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
import { FlowiseMemory, ICommonObject, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
import { Moderation, checkInputs, streamResponse } from '../../moderation/Moderation'
@ -75,7 +75,7 @@ class MistralAIToolAgent_Agents implements INode {
}
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
@ -93,7 +93,7 @@ class MistralAIToolAgent_Agents implements INode {
}
}
const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options)
@ -154,11 +154,7 @@ class MistralAIToolAgent_Agents implements INode {
}
}
const prepareAgent = (
nodeData: INodeData,
flowObj: { sessionId?: string; chatId?: string; input?: string },
chatHistory: IMessage[] = []
) => {
const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
const model = nodeData.inputs?.model as ChatOpenAI
const memory = nodeData.inputs?.memory as FlowiseMemory
const systemMessage = nodeData.inputs?.systemMessage as string
@ -183,7 +179,7 @@ const prepareAgent = (
[inputKey]: (i: { input: string; steps: AgentStep[] }) => i.input,
agent_scratchpad: (i: { input: string; steps: AgentStep[] }) => formatAgentSteps(i.steps),
[memoryKey]: async (_: { input: string; steps: AgentStep[] }) => {
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, chatHistory)) as BaseMessage[]
const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[]
return messages ?? []
}
},

View File

@ -7,7 +7,7 @@ import { ChatOpenAI, formatToOpenAIFunction } from '@langchain/openai'
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
import { getBaseClasses } from '../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
import { FlowiseMemory, ICommonObject, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
import { Moderation, checkInputs } from '../../moderation/Moderation'
@ -74,7 +74,7 @@ class OpenAIFunctionAgent_Agents implements INode {
}
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
@ -92,7 +92,7 @@ class OpenAIFunctionAgent_Agents implements INode {
}
}
const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options)
@ -153,11 +153,7 @@ class OpenAIFunctionAgent_Agents implements INode {
}
}
const prepareAgent = (
nodeData: INodeData,
flowObj: { sessionId?: string; chatId?: string; input?: string },
chatHistory: IMessage[] = []
) => {
const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
const model = nodeData.inputs?.model as ChatOpenAI
const memory = nodeData.inputs?.memory as FlowiseMemory
const systemMessage = nodeData.inputs?.systemMessage as string
@ -182,7 +178,7 @@ const prepareAgent = (
[inputKey]: (i: { input: string; steps: AgentStep[] }) => i.input,
agent_scratchpad: (i: { input: string; steps: AgentStep[] }) => formatAgentSteps(i.steps),
[memoryKey]: async (_: { input: string; steps: AgentStep[] }) => {
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, chatHistory)) as BaseMessage[]
const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[]
return messages ?? []
}
},

View File

@ -8,7 +8,7 @@ import { convertToOpenAITool } from '@langchain/core/utils/function_calling'
import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
import { OpenAIToolsAgentOutputParser, type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
import { getBaseClasses } from '../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
import { FlowiseMemory, ICommonObject, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { AgentExecutor } from '../../../src/agents'
import { Moderation, checkInputs } from '../../moderation/Moderation'
@ -75,7 +75,7 @@ class OpenAIToolAgent_Agents implements INode {
}
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
@ -93,7 +93,7 @@ class OpenAIToolAgent_Agents implements INode {
}
}
const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options)
@ -154,11 +154,7 @@ class OpenAIToolAgent_Agents implements INode {
}
}
const prepareAgent = (
nodeData: INodeData,
flowObj: { sessionId?: string; chatId?: string; input?: string },
chatHistory: IMessage[] = []
) => {
const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
const model = nodeData.inputs?.model as ChatOpenAI
const memory = nodeData.inputs?.memory as FlowiseMemory
const systemMessage = nodeData.inputs?.systemMessage as string
@ -181,7 +177,7 @@ const prepareAgent = (
[inputKey]: (i: { input: string; steps: ToolsAgentStep[] }) => i.input,
agent_scratchpad: (i: { input: string; steps: ToolsAgentStep[] }) => formatToOpenAIToolMessages(i.steps),
[memoryKey]: async (_: { input: string; steps: ToolsAgentStep[] }) => {
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, chatHistory)) as BaseMessage[]
const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[]
return messages ?? []
}
},

View File

@ -61,7 +61,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
return null
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
async run(nodeData: INodeData, input: string): Promise<string | ICommonObject> {
const memory = nodeData.inputs?.memory as FlowiseMemory
const model = nodeData.inputs?.model as OpenAI
const systemMessage = nodeData.inputs?.systemMessage as string
@ -77,7 +77,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
})
}
const msgs = (await memory.getChatMessages(this.sessionId, false, options.chatHistory)) as IMessage[]
const msgs = (await memory.getChatMessages(this.sessionId, false)) as IMessage[]
for (const message of msgs) {
if (message.type === 'apiMessage') {
chatHistory.push({

View File

@ -116,7 +116,7 @@ class XMLAgent_Agents implements INode {
return formatResponse(e.message)
}
}
const executor = await prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
const executor = await prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options)
@ -177,11 +177,7 @@ class XMLAgent_Agents implements INode {
}
}
const prepareAgent = async (
nodeData: INodeData,
flowObj: { sessionId?: string; chatId?: string; input?: string },
chatHistory: IMessage[] = []
) => {
const prepareAgent = async (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
const model = nodeData.inputs?.model as BaseChatModel
const memory = nodeData.inputs?.memory as FlowiseMemory
const systemMessage = nodeData.inputs?.systemMessage as string
@ -207,7 +203,7 @@ const prepareAgent = async (
const llmWithStop = model.bind({ stop: ['</tool_input>', '</final_answer>'] })
const messages = (await memory.getChatMessages(flowObj.sessionId, false, chatHistory)) as IMessage[]
const messages = (await memory.getChatMessages(flowObj.sessionId, false)) as IMessage[]
let chatHistoryMsgTxt = ''
for (const message of messages) {
if (message.type === 'apiMessage') {

View File

@ -217,7 +217,6 @@ const prepareChatPrompt = (nodeData: INodeData, humanImageMessages: MessageConte
}
const prepareChain = (nodeData: INodeData, options: ICommonObject, sessionId?: string) => {
const chatHistory = options.chatHistory
let model = nodeData.inputs?.model as BaseChatModel
const memory = nodeData.inputs?.memory as FlowiseMemory
const memoryKey = memory.memoryKey ?? 'chat_history'
@ -253,7 +252,7 @@ const prepareChain = (nodeData: INodeData, options: ICommonObject, sessionId?: s
{
[inputKey]: (input: { input: string }) => input.input,
[memoryKey]: async () => {
const history = await memory.getChatMessages(sessionId, true, chatHistory)
const history = await memory.getChatMessages(sessionId, true)
return history
},
...promptVariables

View File

@ -1,4 +1,5 @@
import { applyPatch } from 'fast-json-patch'
import { DataSource } from 'typeorm'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { BaseRetriever } from '@langchain/core/retrievers'
import { PromptTemplate, ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
@ -11,9 +12,18 @@ import { StringOutputParser } from '@langchain/core/output_parsers'
import type { Document } from '@langchain/core/documents'
import { BufferMemoryInput } from 'langchain/memory'
import { ConversationalRetrievalQAChain } from 'langchain/chains'
import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
import { getBaseClasses, mapChatMessageToBaseMessage } from '../../../src/utils'
import { ConsoleCallbackHandler, additionalCallbacks } from '../../../src/handler'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, MemoryMethods } from '../../../src/Interface'
import {
FlowiseMemory,
ICommonObject,
IMessage,
INode,
INodeData,
INodeParams,
IDatabaseEntity,
MemoryMethods
} from '../../../src/Interface'
import { QA_TEMPLATE, REPHRASE_TEMPLATE, RESPONSE_TEMPLATE } from './prompts'
type RetrievalChainInput = {
@ -166,6 +176,10 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const responsePrompt = nodeData.inputs?.responsePrompt as string
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
let customResponsePrompt = responsePrompt
// If the deprecated systemMessagePrompt is still exists
if (systemMessagePrompt) {
@ -178,7 +192,9 @@ class ConversationalRetrievalQAChain_Chains implements INode {
memory = new BufferMemory({
returnMessages: true,
memoryKey: 'chat_history',
inputKey: 'input'
appDataSource,
databaseEntities,
chatflowid
})
}
@ -194,7 +210,7 @@ class ConversationalRetrievalQAChain_Chains implements INode {
}
const answerChain = createChain(model, vectorStoreRetriever, rephrasePrompt, customResponsePrompt)
const history = ((await memory.getChatMessages(this.sessionId, false, options.chatHistory)) as IMessage[]) ?? []
const history = ((await memory.getChatMessages(this.sessionId, false)) as IMessage[]) ?? []
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const additionalCallback = await additionalCallbacks(nodeData, options)
@ -367,31 +383,59 @@ const createChain = (
return conversationalQAChain
}
interface BufferMemoryExtendedInput {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
}
class BufferMemory extends FlowiseMemory implements MemoryMethods {
constructor(fields: BufferMemoryInput) {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) {
super(fields)
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
}
async getChatMessages(_?: string, returnBaseMessages = false, prevHistory: IMessage[] = []): Promise<IMessage[] | BaseMessage[]> {
await this.chatHistory.clear()
async getChatMessages(overrideSessionId = '', returnBaseMessages = false): Promise<IMessage[] | BaseMessage[]> {
if (!overrideSessionId) return []
for (const msg of prevHistory) {
if (msg.type === 'userMessage') await this.chatHistory.addUserMessage(msg.message)
else if (msg.type === 'apiMessage') await this.chatHistory.addAIChatMessage(msg.message)
const chatMessage = await this.appDataSource.getRepository(this.databaseEntities['ChatMessage']).find({
where: {
sessionId: overrideSessionId,
chatflowid: this.chatflowid
},
order: {
createdDate: 'ASC'
}
})
if (returnBaseMessages) {
return mapChatMessageToBaseMessage(chatMessage)
}
const memoryResult = await this.loadMemoryVariables({})
const baseMessages = memoryResult[this.memoryKey ?? 'chat_history']
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
let returnIMessages: IMessage[] = []
for (const m of chatMessage) {
returnIMessages.push({
message: m.content as string,
type: m.role
})
}
return returnIMessages
}
async addChatMessages(): Promise<void> {
// adding chat messages will be done on the fly in getChatMessages()
// adding chat messages is done on server level
return
}
async clearChatMessages(): Promise<void> {
await this.clear()
// clearing chat messages is done on server level
return
}
}

View File

@ -83,7 +83,7 @@ class ContextChatEngine_LlamaIndex implements INode {
const chatEngine = new ContextChatEngine({ chatModel: model, retriever: vectorStoreRetriever })
const msgs = (await memory.getChatMessages(this.sessionId, false, options.chatHistory)) as IMessage[]
const msgs = (await memory.getChatMessages(this.sessionId, false)) as IMessage[]
for (const message of msgs) {
if (message.type === 'apiMessage') {
chatHistory.push({

View File

@ -68,7 +68,7 @@ class SimpleChatEngine_LlamaIndex implements INode {
const chatEngine = new SimpleChatEngine({ llm: model })
const msgs = (await memory.getChatMessages(this.sessionId, false, options.chatHistory)) as IMessage[]
const msgs = (await memory.getChatMessages(this.sessionId, false)) as IMessage[]
for (const message of msgs) {
if (message.type === 'apiMessage') {
chatHistory.push({

View File

@ -1,7 +1,17 @@
import { FlowiseMemory, IMessage, INode, INodeData, INodeParams, MemoryMethods } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
import {
FlowiseMemory,
IDatabaseEntity,
ICommonObject,
IMessage,
INode,
INodeData,
INodeParams,
MemoryMethods
} from '../../../src/Interface'
import { getBaseClasses, mapChatMessageToBaseMessage } from '../../../src/utils'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { BaseMessage } from '@langchain/core/messages'
import { DataSource } from 'typeorm'
class BufferMemory_Memory implements INode {
label: string
@ -17,64 +27,109 @@ class BufferMemory_Memory implements INode {
constructor() {
this.label = 'Buffer Memory'
this.name = 'bufferMemory'
this.version = 1.0
this.version = 2.0
this.type = 'BufferMemory'
this.icon = 'memory.svg'
this.category = 'Memory'
this.description = 'Remembers previous conversational back and forths directly'
this.description = 'Retrieve chat messages stored in database'
this.baseClasses = [this.type, ...getBaseClasses(BufferMemory)]
this.inputs = [
{
label: 'Session Id',
name: 'sessionId',
type: 'string',
description:
'If not specified, a random id will be used. Learn <a target="_blank" href="https://docs.flowiseai.com/memory#ui-and-embedded-chat">more</a>',
default: '',
additionalParams: true,
optional: true
},
{
label: 'Memory Key',
name: 'memoryKey',
type: 'string',
default: 'chat_history'
},
{
label: 'Input Key',
name: 'inputKey',
type: 'string',
default: 'input'
default: 'chat_history',
additionalParams: true
}
]
}
async init(nodeData: INodeData): Promise<any> {
const memoryKey = nodeData.inputs?.memoryKey as string
const inputKey = nodeData.inputs?.inputKey as string
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const sessionId = nodeData.inputs?.sessionId as string
const memoryKey = (nodeData.inputs?.memoryKey as string) ?? 'chat_history'
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
return new BufferMemoryExtended({
returnMessages: true,
memoryKey,
inputKey
sessionId,
appDataSource,
databaseEntities,
chatflowid
})
}
}
interface BufferMemoryExtendedInput {
sessionId: string
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
}
class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
constructor(fields: BufferMemoryInput) {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
sessionId = ''
constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) {
super(fields)
this.sessionId = fields.sessionId
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
}
async getChatMessages(_?: string, returnBaseMessages = false, prevHistory: IMessage[] = []): Promise<IMessage[] | BaseMessage[]> {
await this.chatHistory.clear()
async getChatMessages(overrideSessionId = '', returnBaseMessages = false): Promise<IMessage[] | BaseMessage[]> {
const id = overrideSessionId ? overrideSessionId : this.sessionId
if (!id) return []
for (const msg of prevHistory) {
if (msg.type === 'userMessage') await this.chatHistory.addUserMessage(msg.message)
else if (msg.type === 'apiMessage') await this.chatHistory.addAIChatMessage(msg.message)
const chatMessage = await this.appDataSource.getRepository(this.databaseEntities['ChatMessage']).find({
where: {
sessionId: id,
chatflowid: this.chatflowid
},
order: {
createdDate: 'ASC'
}
})
if (returnBaseMessages) {
return mapChatMessageToBaseMessage(chatMessage)
}
const memoryResult = await this.loadMemoryVariables({})
const baseMessages = memoryResult[this.memoryKey ?? 'chat_history']
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
let returnIMessages: IMessage[] = []
for (const m of chatMessage) {
returnIMessages.push({
message: m.content as string,
type: m.role
})
}
return returnIMessages
}
async addChatMessages(): Promise<void> {
// adding chat messages will be done on the fly in getChatMessages()
// adding chat messages is done on server level
return
}
async clearChatMessages(): Promise<void> {
await this.clear()
// clearing chat messages is done on server level
return
}
}

View File

@ -1,7 +1,17 @@
import { FlowiseWindowMemory, IMessage, INode, INodeData, INodeParams, MemoryMethods } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
import {
FlowiseWindowMemory,
ICommonObject,
IDatabaseEntity,
IMessage,
INode,
INodeData,
INodeParams,
MemoryMethods
} from '../../../src/Interface'
import { getBaseClasses, mapChatMessageToBaseMessage } from '../../../src/utils'
import { BufferWindowMemory, BufferWindowMemoryInput } from 'langchain/memory'
import { BaseMessage } from '@langchain/core/messages'
import { DataSource } from 'typeorm'
class BufferWindowMemory_Memory implements INode {
label: string
@ -17,77 +27,124 @@ class BufferWindowMemory_Memory implements INode {
constructor() {
this.label = 'Buffer Window Memory'
this.name = 'bufferWindowMemory'
this.version = 1.0
this.version = 2.0
this.type = 'BufferWindowMemory'
this.icon = 'memory.svg'
this.category = 'Memory'
this.description = 'Uses a window of size k to surface the last k back-and-forth to use as memory'
this.baseClasses = [this.type, ...getBaseClasses(BufferWindowMemory)]
this.inputs = [
{
label: 'Memory Key',
name: 'memoryKey',
type: 'string',
default: 'chat_history'
},
{
label: 'Input Key',
name: 'inputKey',
type: 'string',
default: 'input'
},
{
label: 'Size',
name: 'k',
type: 'number',
default: '4',
description: 'Window of size k to surface the last k back-and-forth to use as memory.'
},
{
label: 'Session Id',
name: 'sessionId',
type: 'string',
description:
'If not specified, a random id will be used. Learn <a target="_blank" href="https://docs.flowiseai.com/memory#ui-and-embedded-chat">more</a>',
default: '',
optional: true,
additionalParams: true
},
{
label: 'Memory Key',
name: 'memoryKey',
type: 'string',
default: 'chat_history',
additionalParams: true
}
]
}
async init(nodeData: INodeData): Promise<any> {
const memoryKey = nodeData.inputs?.memoryKey as string
const inputKey = nodeData.inputs?.inputKey as string
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const k = nodeData.inputs?.k as string
const sessionId = nodeData.inputs?.sessionId as string
const memoryKey = (nodeData.inputs?.memoryKey as string) ?? 'chat_history'
const obj: Partial<BufferWindowMemoryInput> = {
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const obj: Partial<BufferWindowMemoryInput> & BufferMemoryExtendedInput = {
returnMessages: true,
memoryKey: memoryKey,
inputKey: inputKey,
k: parseInt(k, 10)
sessionId,
memoryKey,
k: parseInt(k, 10),
appDataSource,
databaseEntities,
chatflowid
}
return new BufferWindowMemoryExtended(obj)
}
}
interface BufferMemoryExtendedInput {
sessionId: string
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
}
class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMethods {
constructor(fields: BufferWindowMemoryInput) {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
sessionId = ''
constructor(fields: BufferWindowMemoryInput & BufferMemoryExtendedInput) {
super(fields)
this.sessionId = fields.sessionId
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
}
async getChatMessages(_?: string, returnBaseMessages = false, prevHistory: IMessage[] = []): Promise<IMessage[] | BaseMessage[]> {
await this.chatHistory.clear()
async getChatMessages(overrideSessionId = '', returnBaseMessages = false): Promise<IMessage[] | BaseMessage[]> {
const id = overrideSessionId ? overrideSessionId : this.sessionId
if (!id) return []
// Insert into chatHistory
for (const msg of prevHistory) {
if (msg.type === 'userMessage') await this.chatHistory.addUserMessage(msg.message)
else if (msg.type === 'apiMessage') await this.chatHistory.addAIChatMessage(msg.message)
let chatMessage = await this.appDataSource.getRepository(this.databaseEntities['ChatMessage']).find({
where: {
sessionId: id,
chatflowid: this.chatflowid
},
take: this.k + 1,
order: {
createdDate: 'DESC' // we get the latest top K
}
})
// reverse the order of human and ai messages
if (chatMessage.length) chatMessage.reverse()
if (returnBaseMessages) {
return mapChatMessageToBaseMessage(chatMessage)
}
const memoryResult = await this.loadMemoryVariables({})
const baseMessages = memoryResult[this.memoryKey ?? 'chat_history']
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
let returnIMessages: IMessage[] = []
for (const m of chatMessage) {
returnIMessages.push({
message: m.content as string,
type: m.role
})
}
return returnIMessages
}
async addChatMessages(): Promise<void> {
// adding chat messages will be done on the fly in getChatMessages()
// adding chat messages is done on server level
return
}
async clearChatMessages(): Promise<void> {
await this.clear()
// clearing chat messages is done on server level
return
}
}

View File

@ -0,0 +1,187 @@
import {
IMessage,
IDatabaseEntity,
INode,
INodeData,
INodeParams,
MemoryMethods,
ICommonObject,
FlowiseSummaryBufferMemory
} from '../../../src/Interface'
import { getBaseClasses, mapChatMessageToBaseMessage } from '../../../src/utils'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { BaseMessage, getBufferString } from '@langchain/core/messages'
import { ConversationSummaryBufferMemory, ConversationSummaryBufferMemoryInput } from 'langchain/memory'
import { DataSource } from 'typeorm'
/**
 * Flowise node definition for Conversation Summary Buffer Memory.
 * Exposes the node's UI metadata (label, icon, inputs) and builds the
 * database-backed memory instance when the flow is initialised.
 */
class ConversationSummaryBufferMemory_Memory implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Conversation Summary Buffer Memory'
        this.name = 'conversationSummaryBufferMemory'
        this.version = 1.0
        this.type = 'ConversationSummaryBufferMemory'
        this.icon = 'memory.svg'
        this.category = 'Memory'
        this.description = 'Uses token length to decide when to summarize conversations'
        this.baseClasses = [this.type, ...getBaseClasses(ConversationSummaryBufferMemory)]
        this.inputs = [
            {
                label: 'Chat Model',
                name: 'model',
                type: 'BaseChatModel'
            },
            {
                label: 'Max Token Limit',
                name: 'maxTokenLimit',
                type: 'number',
                default: 2000,
                description: 'Summarize conversations once token limit is reached. Default to 2000'
            },
            {
                label: 'Session Id',
                name: 'sessionId',
                type: 'string',
                description:
                    'If not specified, a random id will be used. Learn <a target="_blank" href="https://docs.flowiseai.com/memory#ui-and-embedded-chat">more</a>',
                default: '',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Memory Key',
                name: 'memoryKey',
                type: 'string',
                default: 'chat_history',
                additionalParams: true
            }
        ]
    }

    /**
     * Builds the extended summary-buffer memory backed by the app database.
     *
     * @param nodeData - node configuration holding the user-supplied inputs
     * @param _ - unused raw input string
     * @param options - runtime context carrying the DataSource, entity map and chatflow id
     * @returns a ConversationSummaryBufferMemoryExtended instance
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const model = nodeData.inputs?.model as BaseLanguageModel
        const rawTokenLimit = nodeData.inputs?.maxTokenLimit as string

        const obj: ConversationSummaryBufferMemoryInput & BufferMemoryExtendedInput = {
            llm: model,
            sessionId: nodeData.inputs?.sessionId as string,
            memoryKey: (nodeData.inputs?.memoryKey as string) ?? 'chat_history',
            // Fall back to 2000 tokens when the field is left blank
            maxTokenLimit: rawTokenLimit ? parseInt(rawTokenLimit, 10) : 2000,
            returnMessages: true,
            appDataSource: options.appDataSource as DataSource,
            databaseEntities: options.databaseEntities as IDatabaseEntity,
            chatflowid: options.chatflowid as string
        }
        return new ConversationSummaryBufferMemoryExtended(obj)
    }
}
/** Extra constructor fields needed to read chat history from the app database. */
interface BufferMemoryExtendedInput {
    // Chat session whose messages should be loaded when no override id is given
    sessionId: string
    // TypeORM data source used to query the ChatMessage repository
    appDataSource: DataSource
    // Map of entity names to TypeORM entity classes (keyed e.g. by 'ChatMessage')
    databaseEntities: IDatabaseEntity
    // Restricts queries to messages belonging to this chatflow
    chatflowid: string
}
/**
 * Summary-buffer memory that reads chat history from the application database
 * instead of an in-process history store. Messages are fetched per session and
 * pruned/summarized on the fly once they exceed the configured token limit.
 */
class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory implements MemoryMethods {
    appDataSource: DataSource
    databaseEntities: IDatabaseEntity
    chatflowid: string
    sessionId = ''

    constructor(fields: ConversationSummaryBufferMemoryInput & BufferMemoryExtendedInput) {
        super(fields)
        this.sessionId = fields.sessionId
        this.appDataSource = fields.appDataSource
        this.databaseEntities = fields.databaseEntities
        this.chatflowid = fields.chatflowid
    }

    /**
     * Loads this session's messages from the database, prunes them against
     * maxTokenLimit (folding pruned messages into the rolling summary), and
     * returns either BaseMessages or simplified IMessages.
     *
     * @param overrideSessionId - takes precedence over the configured sessionId when non-empty
     * @param returnBaseMessages - when true, return LangChain BaseMessage objects
     * @returns [] when no session id is available
     */
    async getChatMessages(overrideSessionId = '', returnBaseMessages = false): Promise<IMessage[] | BaseMessage[]> {
        const id = overrideSessionId ? overrideSessionId : this.sessionId
        if (!id) return []

        // Oldest-first so summarization consumes the conversation in order
        let chatMessage = await this.appDataSource.getRepository(this.databaseEntities['ChatMessage']).find({
            where: {
                sessionId: id,
                chatflowid: this.chatflowid
            },
            order: {
                createdDate: 'ASC'
            }
        })

        let baseMessages = mapChatMessageToBaseMessage(chatMessage)

        // Prune baseMessages if it exceeds max token limit
        // Include the existing rolling summary so it counts toward the token budget
        if (this.movingSummaryBuffer) {
            baseMessages = [new this.summaryChatMessageClass(this.movingSummaryBuffer), ...baseMessages]
        }

        let currBufferLength = 0
        // this.llm may be unset/string-typed; token counting only works with a real model
        if (this.llm && typeof this.llm !== 'string') {
            currBufferLength = await this.llm.getNumTokens(getBufferString(baseMessages, this.humanPrefix, this.aiPrefix))
            if (currBufferLength > this.maxTokenLimit) {
                // Pop oldest messages until under budget; note the summary message
                // (if present) is at index 0 and is the first to be pruned
                const prunedMemory = []
                while (currBufferLength > this.maxTokenLimit) {
                    const poppedMessage = baseMessages.shift()
                    if (poppedMessage) {
                        prunedMemory.push(poppedMessage)
                        currBufferLength = await this.llm.getNumTokens(getBufferString(baseMessages, this.humanPrefix, this.aiPrefix))
                    }
                }
                // Mutates persistent state: the rolling summary now covers the pruned messages
                this.movingSummaryBuffer = await this.predictNewSummary(prunedMemory, this.movingSummaryBuffer)
            }
        }
        // ----------- Finished Pruning ---------------

        // NOTE(review): when a summary already exists and NO pruning occurs, the
        // summary message was prepended above and is prepended again here, so it
        // appears twice in the returned list — confirm whether this is intended.
        if (this.movingSummaryBuffer) {
            baseMessages = [new this.summaryChatMessageClass(this.movingSummaryBuffer), ...baseMessages]
        }

        if (returnBaseMessages) {
            return baseMessages
        }

        // Collapse to the simplified IMessage shape used by the Flowise UI
        let returnIMessages: IMessage[] = []
        for (const m of baseMessages) {
            returnIMessages.push({
                message: m.content as string,
                type: m._getType() === 'human' ? 'userMessage' : 'apiMessage'
            })
        }
        return returnIMessages
    }

    async addChatMessages(): Promise<void> {
        // adding chat messages is done on server level
        return
    }

    async clearChatMessages(): Promise<void> {
        // clearing chat messages is done on server level
        return
    }
}
module.exports = { nodeClass: ConversationSummaryBufferMemory_Memory }

View File

@ -0,0 +1,19 @@
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_124_19510)">
<path d="M25 15V9C25 7.89543 24.1046 7 23 7H9C7.89543 7 7 7.89543 7 9V23C7 24.1046 7.89543 25 9 25H15" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M20 15V13C20 12.4477 19.5523 12 19 12H13C12.4477 12 12 12.4477 12 13V19C12 19.5523 12.4477 20 13 20H15" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M7 11H5" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M7 16H5" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M7 21H5" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M21 7L21 5" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M16 7L16 5" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M11 7L11 5" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M11 27L11 25" stroke="black" stroke-width="2" stroke-linecap="round"/>
<path d="M26 19H21C19.8954 19 19 19.8954 19 21V24.2857C19 25.3903 19.8954 26.2857 21 26.2857H21.4545V28L23.5 26.2857H26C27.1046 26.2857 28 25.3903 28 24.2857V21C28 19.8954 27.1046 19 26 19Z" stroke="black" stroke-width="2" stroke-linejoin="round"/>
</g>
<defs>
<clipPath id="clip0_124_19510">
<rect width="32" height="32" fill="white"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@ -1,8 +1,18 @@
import { FlowiseSummaryMemory, IMessage, INode, INodeData, INodeParams, MemoryMethods } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
import {
FlowiseSummaryMemory,
IMessage,
IDatabaseEntity,
INode,
INodeData,
INodeParams,
MemoryMethods,
ICommonObject
} from '../../../src/Interface'
import { getBaseClasses, mapChatMessageToBaseMessage } from '../../../src/utils'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { BaseMessage } from '@langchain/core/messages'
import { BaseMessage, SystemMessage } from '@langchain/core/messages'
import { ConversationSummaryMemory, ConversationSummaryMemoryInput } from 'langchain/memory'
import { DataSource } from 'typeorm'
class ConversationSummaryMemory_Memory implements INode {
label: string
@ -18,7 +28,7 @@ class ConversationSummaryMemory_Memory implements INode {
constructor() {
this.label = 'Conversation Summary Memory'
this.name = 'conversationSummaryMemory'
this.version = 1.0
this.version = 2.0
this.type = 'ConversationSummaryMemory'
this.icon = 'memory.svg'
this.category = 'Memory'
@ -30,67 +40,123 @@ class ConversationSummaryMemory_Memory implements INode {
name: 'model',
type: 'BaseChatModel'
},
{
label: 'Session Id',
name: 'sessionId',
type: 'string',
description:
'If not specified, a random id will be used. Learn <a target="_blank" href="https://docs.flowiseai.com/memory#ui-and-embedded-chat">more</a>',
default: '',
optional: true,
additionalParams: true
},
{
label: 'Memory Key',
name: 'memoryKey',
type: 'string',
default: 'chat_history'
},
{
label: 'Input Key',
name: 'inputKey',
type: 'string',
default: 'input'
default: 'chat_history',
additionalParams: true
}
]
}
async init(nodeData: INodeData): Promise<any> {
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const model = nodeData.inputs?.model as BaseLanguageModel
const memoryKey = nodeData.inputs?.memoryKey as string
const inputKey = nodeData.inputs?.inputKey as string
const sessionId = nodeData.inputs?.sessionId as string
const memoryKey = (nodeData.inputs?.memoryKey as string) ?? 'chat_history'
const obj: ConversationSummaryMemoryInput = {
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const obj: ConversationSummaryMemoryInput & BufferMemoryExtendedInput = {
llm: model,
returnMessages: true,
memoryKey,
inputKey
returnMessages: true,
sessionId,
appDataSource,
databaseEntities,
chatflowid
}
return new ConversationSummaryMemoryExtended(obj)
}
}
interface BufferMemoryExtendedInput {
sessionId: string
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
}
class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements MemoryMethods {
constructor(fields: ConversationSummaryMemoryInput) {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
sessionId = ''
constructor(fields: ConversationSummaryMemoryInput & BufferMemoryExtendedInput) {
super(fields)
this.sessionId = fields.sessionId
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
}
async getChatMessages(_?: string, returnBaseMessages = false, prevHistory: IMessage[] = []): Promise<IMessage[] | BaseMessage[]> {
await this.chatHistory.clear()
this.buffer = ''
async getChatMessages(overrideSessionId = '', returnBaseMessages = false): Promise<IMessage[] | BaseMessage[]> {
const id = overrideSessionId ? overrideSessionId : this.sessionId
if (!id) return []
for (const msg of prevHistory) {
if (msg.type === 'userMessage') await this.chatHistory.addUserMessage(msg.message)
else if (msg.type === 'apiMessage') await this.chatHistory.addAIChatMessage(msg.message)
}
this.buffer = ''
let chatMessage = await this.appDataSource.getRepository(this.databaseEntities['ChatMessage']).find({
where: {
sessionId: id,
chatflowid: this.chatflowid
},
order: {
createdDate: 'ASC'
}
})
const baseMessages = mapChatMessageToBaseMessage(chatMessage)
// Get summary
const chatMessages = await this.chatHistory.getMessages()
this.buffer = chatMessages.length ? await this.predictNewSummary(chatMessages.slice(-2), this.buffer) : ''
if (this.llm && typeof this.llm !== 'string') {
this.buffer = baseMessages.length ? await this.predictNewSummary(baseMessages.slice(-2), this.buffer) : ''
}
const memoryResult = await this.loadMemoryVariables({})
const baseMessages = memoryResult[this.memoryKey ?? 'chat_history']
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
if (returnBaseMessages) {
return [new SystemMessage(this.buffer)]
}
if (this.buffer) {
return [
{
message: this.buffer,
type: 'apiMessage'
}
]
}
let returnIMessages: IMessage[] = []
for (const m of chatMessage) {
returnIMessages.push({
message: m.content as string,
type: m.role
})
}
return returnIMessages
}
async addChatMessages(): Promise<void> {
// adding chat messages will be done on the fly in getChatMessages()
// adding chat messages is done on server level
return
}
async clearChatMessages(): Promise<void> {
await this.clear()
// clearing chat messages is done on server level
return
}
}

View File

@ -1,190 +0,0 @@
import { IMessage, INode, INodeData, INodeParams, MemoryMethods, MessageType } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ICommonObject } from '../../../src'
import { MotorheadMemory, MotorheadMemoryInput, InputValues, OutputValues } from 'langchain/memory'
import fetch from 'node-fetch'
import { AIMessage, BaseMessage, ChatMessage, HumanMessage } from '@langchain/core/messages'
/** Shape of a single message returned by the Motorhead sessions/memory API. */
type MotorheadMessage = {
    content: string
    // Motorhead labels speakers 'Human' or 'AI' (capitalized)
    role: 'Human' | 'AI'
}
/**
 * Flowise node definition for Motorhead Memory.
 * Declares the node's UI metadata, optional hosted-solution credential,
 * and inputs; delegates instantiation to initalizeMotorhead().
 */
class MotorMemory_Memory implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'Motorhead Memory'
        this.name = 'motorheadMemory'
        this.version = 1.0
        this.type = 'MotorheadMemory'
        this.icon = 'motorhead.svg'
        this.category = 'Memory'
        this.description = 'Use Motorhead Memory to store chat conversations'
        this.baseClasses = [this.type, ...getBaseClasses(MotorheadMemory)]
        // Credential is only required for the hosted service; self-hosted
        // deployments authenticate via Base URL alone
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            optional: true,
            description: 'Only needed when using hosted solution - https://getmetal.io',
            credentialNames: ['motorheadMemoryApi']
        }
        this.inputs = [
            {
                label: 'Base URL',
                name: 'baseURL',
                type: 'string',
                optional: true,
                description: 'To use the online version, leave the URL blank. More details at https://getmetal.io.'
            },
            {
                label: 'Session Id',
                name: 'sessionId',
                type: 'string',
                description:
                    'If not specified, a random id will be used. Learn <a target="_blank" href="https://docs.flowiseai.com/memory/long-term-memory#ui-and-embedded-chat">more</a>',
                default: '',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Memory Key',
                name: 'memoryKey',
                type: 'string',
                default: 'chat_history',
                additionalParams: true
            }
        ]
    }

    /**
     * Creates the Motorhead-backed memory instance for this node.
     * @param nodeData - node configuration with user inputs and credential ref
     * @param _ - unused raw input string
     * @param options - runtime context used to resolve credential data
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        return initalizeMotorhead(nodeData, options)
    }
}
/**
 * Builds and initialises a MotorheadMemoryExtended instance.
 * A supplied Base URL selects a self-hosted server; otherwise the hosted
 * service is used with the apiKey/clientId from the node credential.
 *
 * @param nodeData - node configuration with user inputs and credential ref
 * @param options - runtime context used to resolve credential data
 * @returns the memory instance, pre-populated via its init() call
 */
const initalizeMotorhead = async (nodeData: INodeData, options: ICommonObject): Promise<MotorheadMemory> => {
    const inputs = nodeData.inputs
    const baseURL = inputs?.baseURL as string

    // Hosted-solution credentials; unused when a self-hosted URL is provided
    const credentialData = await getCredentialData(nodeData.credential ?? '', options)
    const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
    const clientId = getCredentialParam('clientId', credentialData, nodeData)

    // Choose the connection settings up front instead of patching the object afterwards
    const connection = baseURL ? { url: baseURL } : { apiKey, clientId }
    const obj: MotorheadMemoryInput = {
        returnMessages: true,
        sessionId: inputs?.sessionId as string,
        memoryKey: inputs?.memoryKey as string,
        ...connection
    }

    const memory = new MotorheadMemoryExtended(obj)
    // Load any existing messages for this sessionId from Motorhead
    await memory.init()
    return memory
}
/**
 * MotorheadMemory subclass implementing Flowise's MemoryMethods contract.
 * All operations talk to the Motorhead REST API (`/sessions/{id}/memory`);
 * overrideSessionId lets the server address a different session per call.
 */
class MotorheadMemoryExtended extends MotorheadMemory implements MemoryMethods {
    constructor(fields: MotorheadMemoryInput) {
        super(fields)
    }

    /**
     * Persists one input/output exchange, optionally retargeting the session.
     * NOTE(review): a non-empty overrideSessionId permanently mutates
     * this.sessionId before delegating — confirm that is intended.
     */
    async saveContext(inputValues: InputValues, outputValues: OutputValues, overrideSessionId = ''): Promise<void> {
        if (overrideSessionId) {
            this.sessionId = overrideSessionId
        }
        return super.saveContext(inputValues, outputValues)
    }

    /**
     * Deletes the session's memory on the Motorhead server, then clears the
     * local chat history. Remote deletion failures are logged, not rethrown.
     */
    async clear(overrideSessionId = ''): Promise<void> {
        if (overrideSessionId) {
            this.sessionId = overrideSessionId
        }
        try {
            await this.caller.call(fetch, `${this.url}/sessions/${this.sessionId}/memory`, {
                //@ts-ignore
                signal: this.timeout ? AbortSignal.timeout(this.timeout) : undefined,
                headers: this._getHeaders() as ICommonObject,
                method: 'DELETE'
            })
        } catch (error) {
            console.error('Error deleting session: ', error)
        }

        // Clear the superclass's chat history
        await this.chatHistory.clear()
        await super.clear()
    }

    /**
     * Fetches the session's stored messages from Motorhead.
     *
     * @param overrideSessionId - takes precedence over this.sessionId when non-empty
     * @param returnBaseMessages - when true, return LangChain BaseMessage objects
     * @returns messages oldest-first (API order is reversed), or [] on request failure
     */
    async getChatMessages(overrideSessionId = '', returnBaseMessages = false): Promise<IMessage[] | BaseMessage[]> {
        const id = overrideSessionId ? overrideSessionId : this.sessionId
        try {
            const resp = await this.caller.call(fetch, `${this.url}/sessions/${id}/memory`, {
                //@ts-ignore
                signal: this.timeout ? AbortSignal.timeout(this.timeout) : undefined,
                headers: this._getHeaders() as ICommonObject,
                method: 'GET'
            })

            const data = await resp.json()
            const rawStoredMessages: MotorheadMessage[] = data?.data?.messages ?? []

            // API returns newest-first; reverse to chronological order and
            // map Motorhead roles onto LangChain message classes
            const baseMessages = rawStoredMessages.reverse().map((message) => {
                const { content, role } = message
                if (role === 'Human') {
                    return new HumanMessage(content)
                } else if (role === 'AI') {
                    return new AIMessage(content)
                } else {
                    // default to generic ChatMessage
                    return new ChatMessage(content, role)
                }
            })
            return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
        } catch (error) {
            console.error('Error getting session: ', error)
            return []
        }
    }

    /**
     * Saves the first userMessage/apiMessage pair from msgArray via saveContext.
     * Only one exchange per call is persisted; extra entries are ignored.
     */
    async addChatMessages(msgArray: { text: string; type: MessageType }[], overrideSessionId = ''): Promise<void> {
        const id = overrideSessionId ? overrideSessionId : this.sessionId
        const input = msgArray.find((msg) => msg.type === 'userMessage')
        const output = msgArray.find((msg) => msg.type === 'apiMessage')

        const inputValues = { [this.inputKey ?? 'input']: input?.text }
        const outputValues = { output: output?.text }

        await this.saveContext(inputValues, outputValues, id)
    }

    /** Clears the session's memory (remote + local) for id or this.sessionId. */
    async clearChatMessages(overrideSessionId = ''): Promise<void> {
        const id = overrideSessionId ? overrideSessionId : this.sessionId
        await this.clear(id)
    }
}
module.exports = { nodeClass: MotorMemory_Memory }

View File

@ -1,8 +0,0 @@
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.06235 8.8749L5.76394 9.47173C5.59153 9.81655 5.52303 10.2023 5.56092 10.5859C5.7365 12.3639 5.9998 15.8004 5.9998 19C5.9998 22.3834 8.35516 25.1484 9.74546 26.2991C9.91107 26.4361 10.1024 26.5359 10.3093 26.5943C11.5512 26.9447 14.4841 27.5 18.4998 27.5C22.8998 27.5 24.6665 25.5 24.9998 24.5L25.8568 22.3576C25.9513 22.1214 25.9998 21.8693 25.9998 21.6148V17L26.4998 11V7.97214C26.4998 7.66165 26.4275 7.35542 26.2887 7.07771L26.0333 6.5671C25.7064 5.91311 25.0379 5.5 24.3067 5.5C23.5174 5.5 22.8076 5.98058 22.5144 6.71347L22.1428 7.64238C22.0483 7.87862 21.9998 8.13073 21.9998 8.38516V10.6716C21.9998 11.202 21.7891 11.7107 21.414 12.0858L21.2598 12.24C20.794 12.7058 20.1051 12.8684 19.4802 12.6601C19.1664 12.5556 18.8294 12.5426 18.5085 12.6228L17.3359 12.916C17.1135 12.9716 16.9023 13.065 16.7116 13.1922L16.3137 13.4574C15.8002 13.7998 15.1573 13.8858 14.5718 13.6907L14.3077 13.6026C14.1038 13.5347 13.8902 13.5 13.6752 13.5H12.1832C12.0617 13.5 11.9411 13.5196 11.8258 13.558C11.1434 13.7855 10.4249 13.3258 10.3454 12.6108L10.0392 9.85452C10.0131 9.62002 9.94579 9.39199 9.84028 9.18096L9.68296 8.86632C9.27962 8.05963 8.38626 7.62271 7.50187 7.79959L7.45897 7.80817C6.85378 7.9292 6.33836 8.32288 6.06235 8.8749Z" fill="#FDC31F"/>
<path d="M11 17.5V15.5C11 14.1193 12.1193 13 13.5 13V13C14.8807 13 16 14.1193 16 15.5V17" stroke="#FFC826" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M16 17V14.5C16 13.1193 17.1193 12 18.5 12V12C19.8807 12 21 13.1193 21 14.5V17" stroke="#FFC826" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M21.2846 16.9789L21.8249 7.49597C21.9035 6.11749 23.0847 5.06369 24.4631 5.14225V5.14225C25.8416 5.22081 26.8954 6.40196 26.8168 7.78043L25.992 22.2553" stroke="#FFC826" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M19.0001 21.5H15.2501C14.0075 21.5 13.0001 20.4927 13.0001 19.25C13.0001 18.0074 14.0075 17 15.2501 17H20.9376C23.7336 17 26.0001 19.2666 26.0001 22.0625C26.0001 24.8091 23.8099 27.0548 21.0641 27.1234L14.2001 27.295C9.72719 27.4068 6.0369 23.8299 6.00038 19.3644C6.00002 19.32 6.00048 19.2757 6.00104 19.2313C6.00317 19.0608 6.00772 18.5791 6.00011 18C5.95702 14.7192 5.37544 10.857 5.25735 10.1039C5.24344 10.0152 5.23107 9.92835 5.22731 9.83864C5.17209 8.52062 6.15626 7.37454 7.48458 7.24431C8.86069 7.10939 10.0856 8.11557 10.2205 9.49167L11.0039 17.5778C11.0888 18.454 11.6715 19.2026 12.5 19.5V19.5" stroke="#FFC826" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11 17.5001V15.5001C11 14.1193 12.1193 13.0001 13.5 13.0001C14.8807 13.0001 16 14.1193 16 15.5001V17.0001V14.5001C16 13.1193 17.1193 12.0001 18.5 12.0001C19.8807 12.0001 21 13.1193 21 14.5001V17.0001M21.2846 16.979L21.8249 7.49603C21.9035 6.11754 23.0847 5.06374 24.4631 5.14231C25.8416 5.22087 26.8954 6.40202 26.8168 7.78049L25.992 22.2553M19.0001 21.5001H15.2501C14.0075 21.5001 13.0001 20.4927 13.0001 19.2501M13.0001 19.2501C13.0001 18.0074 14.0075 17.0001 15.2501 17.0001H20.9376C23.7336 17.0001 26.0001 19.2666 26.0001 22.0626C26.0001 24.8092 23.8099 27.0548 21.0641 27.1235L14.2001 27.2951C9.72719 27.4069 6.0369 23.8299 6.00038 19.3644C6.00002 19.3201 6.00048 19.2757 6.00104 19.2314C6.00317 19.0609 6.00772 18.5791 6.00011 18.0001C5.95702 14.7192 5.37544 10.8571 5.25735 10.104C5.24344 10.0153 5.23107 9.9284 5.22731 9.83869C5.17209 8.52067 6.15626 7.3746 7.48458 7.24436C8.86069 7.10944 10.0856 8.11562 10.2205 9.49173L11.0039 17.5778C11.0706 18.2656 11.4439 18.8747 12.0021 19.2488C12.2987 19.4476 12.6807 19.4097 13.0001 19.2501V19.2501Z" stroke="#ECA601" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 3.7 KiB

View File

@ -40,7 +40,7 @@ class UpstashRedisBackedChatMemory_Memory implements INode {
constructor() {
this.label = 'Upstash Redis-Backed Chat Memory'
this.name = 'upstashRedisBackedChatMemory'
this.version = 1.0
this.version = 2.0
this.type = 'UpstashRedisBackedChatMemory'
this.icon = 'upstash.svg'
this.category = 'Memory'
@ -77,6 +77,13 @@ class UpstashRedisBackedChatMemory_Memory implements INode {
description: 'Omit this parameter to make sessions never expire',
additionalParams: true,
optional: true
},
{
label: 'Memory Key',
name: 'memoryKey',
type: 'string',
default: 'chat_history',
additionalParams: true
}
]
}
@ -89,8 +96,8 @@ class UpstashRedisBackedChatMemory_Memory implements INode {
const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject): Promise<BufferMemory> => {
const baseURL = nodeData.inputs?.baseURL as string
const sessionId = nodeData.inputs?.sessionId as string
const memoryKey = nodeData.inputs?.memoryKey as string
const _sessionTTL = nodeData.inputs?.sessionTTL as string
const sessionTTL = _sessionTTL ? parseInt(_sessionTTL, 10) : undefined
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
@ -108,7 +115,7 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject
})
const memory = new BufferMemoryExtended({
memoryKey: 'chat_history',
memoryKey: memoryKey ?? 'chat_history',
chatHistory: redisChatMessageHistory,
sessionId,
sessionTTL,

View File

@ -234,40 +234,34 @@ export class VectorStoreRetriever {
* Implement abstract classes and interface for memory
*/
import { BaseMessage } from '@langchain/core/messages'
import { BufferMemory, BufferWindowMemory, ConversationSummaryMemory } from 'langchain/memory'
import { BufferMemory, BufferWindowMemory, ConversationSummaryMemory, ConversationSummaryBufferMemory } from 'langchain/memory'
export interface MemoryMethods {
getChatMessages(overrideSessionId?: string, returnBaseMessages?: boolean, prevHistory?: IMessage[]): Promise<IMessage[] | BaseMessage[]>
getChatMessages(overrideSessionId?: string, returnBaseMessages?: boolean): Promise<IMessage[] | BaseMessage[]>
addChatMessages(msgArray: { text: string; type: MessageType }[], overrideSessionId?: string): Promise<void>
clearChatMessages(overrideSessionId?: string): Promise<void>
}
export abstract class FlowiseMemory extends BufferMemory implements MemoryMethods {
abstract getChatMessages(
overrideSessionId?: string,
returnBaseMessages?: boolean,
prevHistory?: IMessage[]
): Promise<IMessage[] | BaseMessage[]>
abstract getChatMessages(overrideSessionId?: string, returnBaseMessages?: boolean): Promise<IMessage[] | BaseMessage[]>
abstract addChatMessages(msgArray: { text: string; type: MessageType }[], overrideSessionId?: string): Promise<void>
abstract clearChatMessages(overrideSessionId?: string): Promise<void>
}
export abstract class FlowiseWindowMemory extends BufferWindowMemory implements MemoryMethods {
abstract getChatMessages(
overrideSessionId?: string,
returnBaseMessages?: boolean,
prevHistory?: IMessage[]
): Promise<IMessage[] | BaseMessage[]>
abstract getChatMessages(overrideSessionId?: string, returnBaseMessages?: boolean): Promise<IMessage[] | BaseMessage[]>
abstract addChatMessages(msgArray: { text: string; type: MessageType }[], overrideSessionId?: string): Promise<void>
abstract clearChatMessages(overrideSessionId?: string): Promise<void>
}
export abstract class FlowiseSummaryMemory extends ConversationSummaryMemory implements MemoryMethods {
abstract getChatMessages(
overrideSessionId?: string,
returnBaseMessages?: boolean,
prevHistory?: IMessage[]
): Promise<IMessage[] | BaseMessage[]>
abstract getChatMessages(overrideSessionId?: string, returnBaseMessages?: boolean): Promise<IMessage[] | BaseMessage[]>
abstract addChatMessages(msgArray: { text: string; type: MessageType }[], overrideSessionId?: string): Promise<void>
abstract clearChatMessages(overrideSessionId?: string): Promise<void>
}
export abstract class FlowiseSummaryBufferMemory extends ConversationSummaryBufferMemory implements MemoryMethods {
abstract getChatMessages(overrideSessionId?: string, returnBaseMessages?: boolean): Promise<IMessage[] | BaseMessage[]>
abstract addChatMessages(msgArray: { text: string; type: MessageType }[], overrideSessionId?: string): Promise<void>
abstract clearChatMessages(overrideSessionId?: string): Promise<void>
}

View File

@ -7,7 +7,6 @@ import { z } from 'zod'
import { DataSource } from 'typeorm'
import { ICommonObject, IDatabaseEntity, IMessage, INodeData, IVariable } from './Interface'
import { AES, enc } from 'crypto-js'
import { ChatMessageHistory } from 'langchain/memory'
import { AIMessage, HumanMessage, BaseMessage } from '@langchain/core/messages'
export const numberOrExpressionRegex = '^(\\d+\\.?\\d*|{{.*}})$' //return true if string consists only numbers OR expression {{}}
@ -576,22 +575,21 @@ export const getUserHome = (): string => {
}
/**
* Map incoming chat history to ChatMessageHistory
* @param {ICommonObject} options
* @returns {ChatMessageHistory}
* Map ChatMessage to BaseMessage
* @param {IChatMessage[]} chatmessages
* @returns {BaseMessage[]}
*/
export const mapChatHistory = (options: ICommonObject): ChatMessageHistory => {
export const mapChatMessageToBaseMessage = (chatmessages: any[] = []): BaseMessage[] => {
const chatHistory = []
const histories: IMessage[] = options.chatHistory ?? []
for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
for (const message of chatmessages) {
if (message.role === 'apiMessage') {
chatHistory.push(new AIMessage(message.content))
} else if (message.role === 'userMessage') {
chatHistory.push(new HumanMessage(message.content))
}
}
return new ChatMessageHistory(chatHistory)
return chatHistory
}
/**
@ -615,7 +613,7 @@ export const convertChatHistoryToText = (chatHistory: IMessage[] = []): string =
/**
* Serialize array chat history to string
* @param {IMessage[]} chatHistory
* @param {string | Array<string>} chatHistory
* @returns {string}
*/
export const serializeChatHistory = (chatHistory: string | Array<string>) => {

View File

@ -642,32 +642,36 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -178,32 +178,36 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -15,32 +15,36 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -54,32 +54,36 @@
"data": {
"id": "bufferMemory_1",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_1-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_1-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_1-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -1550,32 +1550,36 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -54,32 +54,36 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -83,32 +83,36 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -257,32 +257,36 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -15,32 +15,36 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"version": 1,
"version": 2,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"description": "Retrieve chat messages stored in database",
"inputParams": [
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
"default": "",
"additionalParams": true,
"optional": true,
"id": "bufferMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{

View File

@ -195,7 +195,6 @@ export interface IMessage {
export interface IncomingInput {
question: string
history: IMessage[]
overrideConfig?: ICommonObject
socketIOClientId?: string
chatId?: string

View File

@ -132,7 +132,6 @@ export const utilBuildChatflow = async (req: Request, socketIO?: Server, isInter
incomingInput = {
question: req.body.question ?? 'hello',
overrideConfig,
history: [],
socketIOClientId: req.body.socketIOClientId
}
}
@ -146,8 +145,7 @@ export const utilBuildChatflow = async (req: Request, socketIO?: Server, isInter
// Get session ID
const memoryNode = findMemoryNode(nodes, edges)
const memoryType = memoryNode?.data.label
let sessionId = undefined
if (memoryNode) sessionId = getMemorySessionId(memoryNode, incomingInput, chatId, isInternal)
let sessionId = getMemorySessionId(memoryNode, incomingInput, chatId, isInternal)
/* Reuse the flow without having to rebuild (to avoid duplicated upsert, recomputation, reinitialization of memory) when all these conditions met:
* - Node Data already exists in pool
@ -225,9 +223,9 @@ export const utilBuildChatflow = async (req: Request, socketIO?: Server, isInter
// Once custom function ending node exists, flow is always unavailable to stream
isStreamValid = isEndingNodeExists ? false : isStreamValid
let chatHistory: IMessage[] = incomingInput.history ?? []
let chatHistory: IMessage[] = []
// When {{chat_history}} is used in Prompt Template, fetch the chat conversations from memory node
// When {{chat_history}} is used in Format Prompt Value, fetch the chat conversations from memory node
for (const endingNode of endingNodes) {
const endingNodeData = endingNode.data
@ -238,16 +236,15 @@ export const utilBuildChatflow = async (req: Request, socketIO?: Server, isInter
if (!memoryNode) continue
if (!chatHistory.length && (incomingInput.chatId || incomingInput.overrideConfig?.sessionId)) {
chatHistory = await getSessionChatHistory(
memoryNode,
appServer.nodesPool.componentNodes,
incomingInput,
appServer.AppDataSource,
databaseEntities,
logger
)
}
chatHistory = await getSessionChatHistory(
chatflowid,
getMemorySessionId(memoryNode, incomingInput, chatId, isInternal),
memoryNode,
appServer.nodesPool.componentNodes,
appServer.AppDataSource,
databaseEntities,
logger
)
}
/*** Get Starting Nodes with Reversed Graph ***/
@ -314,7 +311,6 @@ export const utilBuildChatflow = async (req: Request, socketIO?: Server, isInter
? await nodeInstance.run(nodeToExecuteData, incomingInput.question, {
chatId,
chatflowid,
chatHistory: incomingInput.history,
logger,
appDataSource: appServer.AppDataSource,
databaseEntities,
@ -326,7 +322,6 @@ export const utilBuildChatflow = async (req: Request, socketIO?: Server, isInter
: await nodeInstance.run(nodeToExecuteData, incomingInput.question, {
chatId,
chatflowid,
chatHistory: incomingInput.history,
logger,
appDataSource: appServer.AppDataSource,
databaseEntities,

View File

@ -1144,16 +1144,18 @@ export const redactCredentialWithPasswordType = (
* API/Embed + UI:
* (3) Hard-coded sessionId in UI
* (4) Not specified on UI nor API, default to chatId
* @param {any} instance
* @param {IReactFlowNode | undefined} memoryNode
* @param {IncomingInput} incomingInput
* @param {string} chatId
* @param {boolean} isInternal
* @returns {string}
*/
export const getMemorySessionId = (
memoryNode: IReactFlowNode,
memoryNode: IReactFlowNode | undefined,
incomingInput: IncomingInput,
chatId: string,
isInternal: boolean
): string | undefined => {
): string => {
if (!isInternal) {
// Provided in API body - incomingInput.overrideConfig: { sessionId: 'abc' }
if (incomingInput.overrideConfig?.sessionId) {
@ -1166,7 +1168,7 @@ export const getMemorySessionId = (
}
// Hard-coded sessionId in UI
if (memoryNode.data.inputs?.sessionId) {
if (memoryNode && memoryNode.data.inputs?.sessionId) {
return memoryNode.data.inputs.sessionId
}
@ -1175,18 +1177,21 @@ export const getMemorySessionId = (
}
/**
* Replace chatHistory if incomingInput.history is empty and sessionId/chatId is provided
* Get chat messages from sessionId
* @param {IReactFlowNode} memoryNode
* @param {IncomingInput} incomingInput
* @param {string} sessionId
* @param {IReactFlowNode} memoryNode
* @param {IComponentNodes} componentNodes
* @param {DataSource} appDataSource
* @param {IDatabaseEntity} databaseEntities
* @param {any} logger
* @returns {string}
* @returns {IMessage[]}
*/
export const getSessionChatHistory = async (
chatflowid: string,
sessionId: string,
memoryNode: IReactFlowNode,
componentNodes: IComponentNodes,
incomingInput: IncomingInput,
appDataSource: DataSource,
databaseEntities: IDatabaseEntity,
logger: any
@ -1196,19 +1201,18 @@ export const getSessionChatHistory = async (
const newNodeInstance = new nodeModule.nodeClass()
// Replace memory's sessionId/chatId
if (incomingInput.overrideConfig?.sessionId && memoryNode.data.inputs) {
memoryNode.data.inputs.sessionId = incomingInput.overrideConfig.sessionId
} else if (incomingInput.chatId && memoryNode.data.inputs) {
memoryNode.data.inputs.sessionId = incomingInput.chatId
if (memoryNode.data.inputs) {
memoryNode.data.inputs.sessionId = sessionId
}
const initializedInstance: FlowiseMemory = await newNodeInstance.init(memoryNode.data, '', {
chatflowid,
appDataSource,
databaseEntities,
logger
})
return (await initializedInstance.getChatMessages()) as IMessage[]
return (await initializedInstance.getChatMessages(sessionId)) as IMessage[]
}
/**
@ -1216,7 +1220,7 @@ export const getSessionChatHistory = async (
* In a chatflow, there should only be 1 memory node
* @param {IReactFlowNode[]} nodes
* @param {IReactFlowEdge[]} edges
* @returns {string | undefined}
* @returns {IReactFlowNode | undefined}
*/
export const findMemoryNode = (nodes: IReactFlowNode[], edges: IReactFlowEdge[]): IReactFlowNode | undefined => {
const memoryNodes = nodes.filter((node) => node.data.category === 'Memory')
@ -1228,6 +1232,7 @@ export const findMemoryNode = (nodes: IReactFlowNode[], edges: IReactFlowEdge[])
return memoryNode
}
}
return undefined
}

View File

@ -1,7 +1,7 @@
import { Request } from 'express'
import * as fs from 'fs'
import { cloneDeep, omit } from 'lodash'
import { ICommonObject } from 'flowise-components'
import { ICommonObject, IMessage } from 'flowise-components'
import telemetryService from '../services/telemetry'
import logger from '../utils/logger'
import {
@ -66,7 +66,6 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) =>
incomingInput = {
question: req.body.question ?? 'hello',
overrideConfig,
history: [],
stopNodeId: req.body.stopNodeId
}
}
@ -78,14 +77,13 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) =>
const edges = parsedFlowData.edges
let stopNodeId = incomingInput?.stopNodeId ?? ''
let chatHistory = incomingInput?.history
let chatHistory: IMessage[] = []
let chatId = incomingInput.chatId ?? ''
let isUpsert = true
// Get session ID
const memoryNode = findMemoryNode(nodes, edges)
let sessionId = undefined
if (memoryNode) sessionId = getMemorySessionId(memoryNode, incomingInput, chatId, isInternal)
let sessionId = getMemorySessionId(memoryNode, incomingInput, chatId, isInternal)
const vsNodes = nodes.filter(
(node) =>

View File

@ -519,11 +519,10 @@ export const formatDataGridRows = (rows) => {
}
}
export const setLocalStorageChatflow = (chatflowid, chatId, chatHistory) => {
export const setLocalStorageChatflow = (chatflowid, chatId) => {
const chatDetails = localStorage.getItem(`${chatflowid}_INTERNAL`)
const obj = {}
if (chatId) obj.chatId = chatId
if (chatHistory) obj.chatHistory = chatHistory
if (!chatDetails) {
localStorage.setItem(`${chatflowid}_INTERNAL`, JSON.stringify(obj))

View File

@ -392,11 +392,10 @@ export const ChatMessage = ({ open, chatflowid, isDialog, previews, setPreviews
clearPreviews()
setMessages((prevMessages) => [...prevMessages, { message: input, type: 'userMessage', fileUploads: urls }])
// Send user question and history to API
// Send user question to Prediction Internal API
try {
const params = {
question: input,
history: messages.filter((msg) => msg.message !== 'Hi there! How can I help?'),
chatId
}
if (urls && urls.length > 0) params.uploads = urls
@ -447,7 +446,7 @@ export const ChatMessage = ({ open, chatflowid, isDialog, previews, setPreviews
}
])
}
setLocalStorageChatflow(chatflowid, data.chatId, messages)
setLocalStorageChatflow(chatflowid, data.chatId)
setLoading(false)
setUserInput('')
setTimeout(() => {
@ -520,7 +519,7 @@ export const ChatMessage = ({ open, chatflowid, isDialog, previews, setPreviews
return obj
})
setMessages((prevMessages) => [...prevMessages, ...loadedMessages])
setLocalStorageChatflow(chatflowid, chatId, messages)
setLocalStorageChatflow(chatflowid, chatId)
}
// eslint-disable-next-line react-hooks/exhaustive-deps

View File

@ -129,7 +129,16 @@ const MarketplaceCanvasNode = ({ data }) => {
<NodeInputHandler disabled={true} key={index} inputParam={inputParam} data={data} />
))}
{data.inputParams.find((param) => param.additionalParams) && (
<div style={{ textAlign: 'center' }}>
<div
style={{
textAlign: 'center',
marginTop:
data.inputParams.filter((param) => param.additionalParams).length ===
data.inputParams.length + data.inputAnchors.length
? 20
: 0
}}
>
<Button sx={{ borderRadius: 25, width: '90%', mb: 2 }} variant='outlined' onClick={onDialogClicked}>
Additional Parameters
</Button>