diff --git a/package.json b/package.json index 909eb3453..804c3c968 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "1.4.2", + "version": "1.4.5", "private": true, "homepage": "https://flowiseai.com", "workspaces": [ diff --git a/packages/components/credentials/LangfuseApi.credential.ts b/packages/components/credentials/LangfuseApi.credential.ts index 452ca9897..923af5177 100644 --- a/packages/components/credentials/LangfuseApi.credential.ts +++ b/packages/components/credentials/LangfuseApi.credential.ts @@ -12,7 +12,7 @@ class LangfuseApi implements INodeCredential { this.name = 'langfuseApi' this.version = 1.0 this.description = - 'Refer to official guide on how to get API key on Langfuse' + 'Refer to integration guide on how to get API keys on Langfuse' this.inputs = [ { label: 'Secret Key', diff --git a/packages/components/credentials/RedisCacheApi.credential.ts b/packages/components/credentials/RedisCacheApi.credential.ts index e09a94e7a..4d1a2498f 100644 --- a/packages/components/credentials/RedisCacheApi.credential.ts +++ b/packages/components/credentials/RedisCacheApi.credential.ts @@ -8,7 +8,7 @@ class RedisCacheApi implements INodeCredential { inputs: INodeParams[] constructor() { - this.label = 'Redis Cache API' + this.label = 'Redis API' this.name = 'redisCacheApi' this.version = 1.0 this.inputs = [ diff --git a/packages/components/credentials/RedisCacheUrlApi.credential.ts b/packages/components/credentials/RedisCacheUrlApi.credential.ts index fc2e2eb26..e016d78f7 100644 --- a/packages/components/credentials/RedisCacheUrlApi.credential.ts +++ b/packages/components/credentials/RedisCacheUrlApi.credential.ts @@ -8,7 +8,7 @@ class RedisCacheUrlApi implements INodeCredential { inputs: INodeParams[] constructor() { - this.label = 'Redis Cache URL' + this.label = 'Redis URL' this.name = 'redisCacheUrlApi' this.version = 1.0 this.inputs = [ @@ -16,7 +16,7 @@ class RedisCacheUrlApi implements INodeCredential { 
label: 'Redis URL', name: 'redisUrl', type: 'string', - default: '127.0.0.1' + default: 'redis://localhost:6379' } ] } diff --git a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts index 00f825d44..8a2329b58 100644 --- a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts +++ b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts @@ -3,7 +3,7 @@ import { initializeAgentExecutorWithOptions, AgentExecutor, InitializeAgentExecu import { Tool } from 'langchain/tools' import { BaseChatMemory } from 'langchain/memory' import { getBaseClasses, mapChatHistory } from '../../../src/utils' -import { BaseLanguageModel } from 'langchain/base_language' +import { BaseChatModel } from 'langchain/chat_models/base' import { flatten } from 'lodash' import { additionalCallbacks } from '../../../src/handler' @@ -29,7 +29,7 @@ class ConversationalAgent_Agents implements INode { constructor() { this.label = 'Conversational Agent' this.name = 'conversationalAgent' - this.version = 1.0 + this.version = 2.0 this.type = 'AgentExecutor' this.category = 'Agents' this.icon = 'agent.svg' @@ -45,7 +45,7 @@ class ConversationalAgent_Agents implements INode { { label: 'Language Model', name: 'model', - type: 'BaseLanguageModel' + type: 'BaseChatModel' }, { label: 'Memory', @@ -65,7 +65,7 @@ class ConversationalAgent_Agents implements INode { } async init(nodeData: INodeData): Promise { - const model = nodeData.inputs?.model as BaseLanguageModel + const model = nodeData.inputs?.model as BaseChatModel let tools = nodeData.inputs?.tools as Tool[] tools = flatten(tools) const memory = nodeData.inputs?.memory as BaseChatMemory @@ -92,8 +92,6 @@ class ConversationalAgent_Agents implements INode { const executor = nodeData.instance as AgentExecutor const memory = nodeData.inputs?.memory as BaseChatMemory - const callbacks = await 
additionalCallbacks(nodeData, options) - if (options && options.chatHistory) { const chatHistoryClassName = memory.chatHistory.constructor.name // Only replace when its In-Memory @@ -103,6 +101,10 @@ class ConversationalAgent_Agents implements INode { } } + ;(executor.memory as any).returnMessages = true // Return true for BaseChatModel + + const callbacks = await additionalCallbacks(nodeData, options) + const result = await executor.call({ input }, [...callbacks]) return result?.output } diff --git a/packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts b/packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts index 4a908d7fe..643c6a658 100644 --- a/packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts +++ b/packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts @@ -21,7 +21,7 @@ class ConversationalRetrievalAgent_Agents implements INode { constructor() { this.label = 'Conversational Retrieval Agent' this.name = 'conversationalRetrievalAgent' - this.version = 1.0 + this.version = 3.0 this.type = 'AgentExecutor' this.category = 'Agents' this.icon = 'agent.svg' @@ -40,9 +40,9 @@ class ConversationalRetrievalAgent_Agents implements INode { type: 'BaseChatMemory' }, { - label: 'OpenAI Chat Model', + label: 'OpenAI/Azure Chat Model', name: 'model', - type: 'ChatOpenAI' + type: 'BaseChatModel' }, { label: 'System Message', @@ -82,6 +82,8 @@ class ConversationalRetrievalAgent_Agents implements INode { if (executor.memory) { ;(executor.memory as any).memoryKey = 'chat_history' ;(executor.memory as any).outputKey = 'output' + ;(executor.memory as any).returnMessages = true + const chatHistoryClassName = (executor.memory as any).chatHistory.constructor.name // Only replace when its In-Memory if (chatHistoryClassName && chatHistoryClassName === 'ChatMessageHistory') { diff --git 
a/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts b/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts index b119599dc..d44263948 100644 --- a/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts +++ b/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts @@ -8,6 +8,7 @@ import * as path from 'node:path' import fetch from 'node-fetch' import { flatten, uniqWith, isEqual } from 'lodash' import { zodToJsonSchema } from 'zod-to-json-schema' +import { AnalyticHandler } from '../../../src/handler' class OpenAIAssistant_Agents implements INode { label: string @@ -23,7 +24,7 @@ class OpenAIAssistant_Agents implements INode { constructor() { this.label = 'OpenAI Assistant' this.name = 'openAIAssistant' - this.version = 1.0 + this.version = 2.0 this.type = 'OpenAIAssistant' this.category = 'Agents' this.icon = 'openai.png' @@ -41,6 +42,15 @@ class OpenAIAssistant_Agents implements INode { name: 'tools', type: 'Tool', list: true + }, + { + label: 'Disable File Download', + name: 'disableFileDownload', + type: 'boolean', + description: + 'Messages can contain text, images, or files. In some cases, you may want to prevent others from downloading the files. 
Learn more from OpenAI File Annotation docs', + optional: true, + additionalParams: true } ] } @@ -76,49 +86,54 @@ class OpenAIAssistant_Agents implements INode { return null } - async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { - const selectedAssistantId = nodeData.inputs?.selectedAssistant as string - const appDataSource = options.appDataSource as DataSource - const databaseEntities = options.databaseEntities as IDatabaseEntity - let sessionId = nodeData.inputs?.sessionId as string + //@ts-ignore + memoryMethods = { + async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { + const selectedAssistantId = nodeData.inputs?.selectedAssistant as string + const appDataSource = options.appDataSource as DataSource + const databaseEntities = options.databaseEntities as IDatabaseEntity + let sessionId = nodeData.inputs?.sessionId as string - const assistant = await appDataSource.getRepository(databaseEntities['Assistant']).findOneBy({ - id: selectedAssistantId - }) - - if (!assistant) { - options.logger.error(`Assistant ${selectedAssistantId} not found`) - return - } - - if (!sessionId && options.chatId) { - const chatmsg = await appDataSource.getRepository(databaseEntities['ChatMessage']).findOneBy({ - chatId: options.chatId + const assistant = await appDataSource.getRepository(databaseEntities['Assistant']).findOneBy({ + id: selectedAssistantId }) - if (!chatmsg) { - options.logger.error(`Chat Message with Chat Id: ${options.chatId} not found`) + + if (!assistant) { + options.logger.error(`Assistant ${selectedAssistantId} not found`) return } - sessionId = chatmsg.sessionId - } - const credentialData = await getCredentialData(assistant.credential ?? 
'', options) - const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData) - if (!openAIApiKey) { - options.logger.error(`OpenAI ApiKey not found`) - return - } + if (!sessionId && options.chatId) { + const chatmsg = await appDataSource.getRepository(databaseEntities['ChatMessage']).findOneBy({ + chatId: options.chatId + }) + if (!chatmsg) { + options.logger.error(`Chat Message with Chat Id: ${options.chatId} not found`) + return + } + sessionId = chatmsg.sessionId + } - const openai = new OpenAI({ apiKey: openAIApiKey }) - options.logger.info(`Clearing OpenAI Thread ${sessionId}`) - if (sessionId) await openai.beta.threads.del(sessionId) - options.logger.info(`Successfully cleared OpenAI Thread ${sessionId}`) + const credentialData = await getCredentialData(assistant.credential ?? '', options) + const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData) + if (!openAIApiKey) { + options.logger.error(`OpenAI ApiKey not found`) + return + } + + const openai = new OpenAI({ apiKey: openAIApiKey }) + options.logger.info(`Clearing OpenAI Thread ${sessionId}`) + if (sessionId) await openai.beta.threads.del(sessionId) + options.logger.info(`Successfully cleared OpenAI Thread ${sessionId}`) + } } async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { const selectedAssistantId = nodeData.inputs?.selectedAssistant as string const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity + const disableFileDownload = nodeData.inputs?.disableFileDownload as boolean + let tools = nodeData.inputs?.tools tools = flatten(tools) const formattedTools = tools?.map((tool: any) => formatToOpenAIAssistantTool(tool)) ?? 
[] @@ -135,6 +150,11 @@ class OpenAIAssistant_Agents implements INode { const openai = new OpenAI({ apiKey: openAIApiKey }) + // Start analytics + const analyticHandlers = new AnalyticHandler(nodeData, options) + await analyticHandlers.init() + const parentIds = await analyticHandlers.onChainStart('OpenAIAssistant', input) + try { const assistantDetails = JSON.parse(assistant.details) const openAIAssistantId = assistantDetails.id @@ -157,7 +177,8 @@ class OpenAIAssistant_Agents implements INode { } const chatmessage = await appDataSource.getRepository(databaseEntities['ChatMessage']).findOneBy({ - chatId: options.chatId + chatId: options.chatId, + chatflowid: options.chatflowid }) let threadId = '' @@ -171,7 +192,7 @@ class OpenAIAssistant_Agents implements INode { threadId = thread.id } - // List all runs + // List all runs, in case existing thread is still running if (!isNewThread) { const promise = (threadId: string) => { return new Promise((resolve) => { @@ -207,6 +228,7 @@ class OpenAIAssistant_Agents implements INode { }) // Run assistant thread + const llmIds = await analyticHandlers.onLLMStart('ChatOpenAI', input, parentIds) const runThread = await openai.beta.threads.runs.create(threadId, { assistant_id: retrievedAssistant.id }) @@ -239,7 +261,15 @@ class OpenAIAssistant_Agents implements INode { for (let i = 0; i < actions.length; i += 1) { const tool = tools.find((tool: any) => tool.name === actions[i].tool) if (!tool) continue + + // Start tool analytics + const toolIds = await analyticHandlers.onToolStart(tool.name, actions[i].toolInput, parentIds) + const toolOutput = await tool.call(actions[i].toolInput) + + // End tool analytics + await analyticHandlers.onToolEnd(toolIds, toolOutput) + submitToolOutputs.push({ tool_call_id: actions[i].toolCallId, output: toolOutput @@ -288,7 +318,9 @@ class OpenAIAssistant_Agents implements INode { runThreadId = newRunThread.id state = await promise(threadId, newRunThread.id) } else { - throw new Error(`Error 
processing thread: ${state}, Thread ID: ${threadId}`) + const errMsg = `Error processing thread: ${state}, Thread ID: ${threadId}` + await analyticHandlers.onChainError(parentIds, errMsg) + throw new Error(errMsg) } } @@ -310,7 +342,7 @@ class OpenAIAssistant_Agents implements INode { const dirPath = path.join(getUserHome(), '.flowise', 'openai-assistant') - // Iterate over the annotations and add footnotes + // Iterate over the annotations for (let index = 0; index < annotations.length; index++) { const annotation = annotations[index] let filePath = '' @@ -323,11 +355,13 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', fileName) - await downloadFile(cited_file, filePath, dirPath, openAIApiKey) - fileAnnotations.push({ - filePath, - fileName - }) + if (!disableFileDownload) { + await downloadFile(cited_file, filePath, dirPath, openAIApiKey) + fileAnnotations.push({ + filePath, + fileName + }) + } } else { const file_path = (annotation as OpenAI.Beta.Threads.Messages.MessageContentText.Text.FilePath).file_path if (file_path) { @@ -335,22 +369,30 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? 
cited_file.filename filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', fileName) - await downloadFile(cited_file, filePath, dirPath, openAIApiKey) - fileAnnotations.push({ - filePath, - fileName - }) + if (!disableFileDownload) { + await downloadFile(cited_file, filePath, dirPath, openAIApiKey) + fileAnnotations.push({ + filePath, + fileName + }) + } } } // Replace the text with a footnote - message_content.value = message_content.value.replace(`${annotation.text}`, `${filePath}`) + message_content.value = message_content.value.replace( + `${annotation.text}`, + `${disableFileDownload ? '' : filePath}` + ) } returnVal += message_content.value } else { returnVal += content.text.value } + + const lenticularBracketRegex = /【[^】]*】/g + returnVal = returnVal.replace(lenticularBracketRegex, '') } else { const content = assistantMessages[0].content[i] as MessageContentImageFile const fileId = content.image_file.file_id @@ -363,11 +405,18 @@ class OpenAIAssistant_Agents implements INode { const bitmap = fsDefault.readFileSync(filePath) const base64String = Buffer.from(bitmap).toString('base64') + // TODO: Use a file path and retrieve image on the fly. Storing as base64 to localStorage and database will easily hit limits const imgHTML = `${fileObj.filename}
` returnVal += imgHTML } } + const imageRegex = /]*\/>/g + let llmOutput = returnVal.replace(imageRegex, '') + llmOutput = llmOutput.replace('
', '') + await analyticHandlers.onLLMEnd(llmIds, llmOutput) + await analyticHandlers.onChainEnd(parentIds, messageData, true) + return { text: returnVal, usedTools, @@ -375,6 +424,7 @@ class OpenAIAssistant_Agents implements INode { assistant: { assistantId: openAIAssistantId, threadId, runId: runThreadId, messages: messageData } } } catch (error) { + await analyticHandlers.onChainError(parentIds, error, true) throw new Error(error) } } diff --git a/packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts b/packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts index c920c399e..96ba7ea38 100644 --- a/packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts +++ b/packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts @@ -20,11 +20,11 @@ class OpenAIFunctionAgent_Agents implements INode { constructor() { this.label = 'OpenAI Function Agent' this.name = 'openAIFunctionAgent' - this.version = 1.0 + this.version = 3.0 this.type = 'AgentExecutor' this.category = 'Agents' this.icon = 'openai.png' - this.description = `An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call` + this.description = `An agent that uses Function Calling to pick the tool and args to call` this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)] this.inputs = [ { @@ -39,10 +39,8 @@ class OpenAIFunctionAgent_Agents implements INode { type: 'BaseChatMemory' }, { - label: 'OpenAI Chat Model', + label: 'OpenAI/Azure Chat Model', name: 'model', - description: - 'Only works with gpt-3.5-turbo-0613 and gpt-4-0613. 
Refer docs for more info', type: 'BaseChatModel' }, { @@ -89,6 +87,8 @@ class OpenAIFunctionAgent_Agents implements INode { } } + ;(executor.memory as any).returnMessages = true // Return true for BaseChatModel + const loggerHandler = new ConsoleCallbackHandler(options.logger) const callbacks = await additionalCallbacks(nodeData, options) diff --git a/packages/components/nodes/cache/RedisCache/RedisCache.ts b/packages/components/nodes/cache/RedisCache/RedisCache.ts index 3b68cf127..8128b6e32 100644 --- a/packages/components/nodes/cache/RedisCache/RedisCache.ts +++ b/packages/components/nodes/cache/RedisCache/RedisCache.ts @@ -89,7 +89,7 @@ class RedisCache implements INode { redisClient.update = async (prompt: string, llmKey: string, value: Generation[]) => { for (let i = 0; i < value.length; i += 1) { const key = getCacheKey(prompt, llmKey, String(i)) - if (ttl !== undefined) { + if (ttl) { await client.set(key, JSON.stringify(serializeGeneration(value[i])), 'EX', parseInt(ttl, 10)) } else { await client.set(key, JSON.stringify(serializeGeneration(value[i]))) diff --git a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts index 92a0b5eab..7887ce97b 100644 --- a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts +++ b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts @@ -106,16 +106,18 @@ class ConversationChain_Chains implements INode { async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { const chain = nodeData.instance as ConversationChain const memory = nodeData.inputs?.memory as BufferMemory + memory.returnMessages = true // Return true for BaseChatModel if (options && options.chatHistory) { const chatHistoryClassName = memory.chatHistory.constructor.name // Only replace when its In-Memory if (chatHistoryClassName && chatHistoryClassName === 'ChatMessageHistory') { memory.chatHistory = 
mapChatHistory(options) - chain.memory = memory } } + chain.memory = memory + const loggerHandler = new ConsoleCallbackHandler(options.logger) const callbacks = await additionalCallbacks(nodeData, options) diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index ee532a279..fd398151a 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -1,7 +1,7 @@ import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils' import { LLMChain } from 'langchain/chains' -import { BaseLanguageModel } from 'langchain/base_language' +import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language' import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler' import { BaseOutputParser } from 'langchain/schema/output_parser' import { formatResponse, injectOutputParser } from '../../outputparsers/OutputParserHelpers' @@ -141,7 +141,7 @@ class LLMChain_Chains implements INode { const runPrediction = async ( inputVariables: string[], - chain: LLMChain, + chain: LLMChain>, input: string, promptValuesRaw: ICommonObject | undefined, options: ICommonObject, @@ -164,7 +164,7 @@ const runPrediction = async ( if (moderations && moderations.length > 0) { try { // Use the output of the moderation chain as input for the LLM chain - input = await checkInputs(moderations, chain.llm, input) + input = await checkInputs(moderations, input) } catch (e) { await new Promise((resolve) => setTimeout(resolve, 500)) streamResponse(isStreaming, e.message, socketIO, socketIOClientId) diff --git a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts index ade46ab94..956fcdb33 100644 --- 
a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts +++ b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts @@ -27,7 +27,7 @@ class AWSChatBedrock_ChatModels implements INode { constructor() { this.label = 'AWS Bedrock' this.name = 'awsChatBedrock' - this.version = 2.0 + this.version = 3.0 this.type = 'AWSChatBedrock' this.icon = 'awsBedrock.png' this.category = 'Chat Models' @@ -97,7 +97,8 @@ class AWSChatBedrock_ChatModels implements INode { options: [ { label: 'anthropic.claude-instant-v1', name: 'anthropic.claude-instant-v1' }, { label: 'anthropic.claude-v1', name: 'anthropic.claude-v1' }, - { label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' } + { label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' }, + { label: 'meta.llama2-13b-chat-v1', name: 'meta.llama2-13b-chat-v1' } ], default: 'anthropic.claude-v2' }, @@ -128,12 +129,14 @@ class AWSChatBedrock_ChatModels implements INode { const iTemperature = nodeData.inputs?.temperature as string const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string const cache = nodeData.inputs?.cache as BaseCache + const streaming = nodeData.inputs?.streaming as boolean const obj: BaseBedrockInput & BaseLLMParams = { region: iRegion, model: iModel, maxTokens: parseInt(iMax_tokens_to_sample, 10), - temperature: parseFloat(iTemperature) + temperature: parseFloat(iTemperature), + streaming: streaming ?? 
true } /** diff --git a/packages/components/nodes/documentloaders/S3File/S3File.ts b/packages/components/nodes/documentloaders/S3File/S3File.ts index 07295abaa..58ffd8af7 100644 --- a/packages/components/nodes/documentloaders/S3File/S3File.ts +++ b/packages/components/nodes/documentloaders/S3File/S3File.ts @@ -162,8 +162,11 @@ class S3_DocumentLoaders implements INode { accessKeyId?: string secretAccessKey?: string } = { - accessKeyId, - secretAccessKey + region, + credentials: { + accessKeyId, + secretAccessKey + } } loader.load = async () => { diff --git a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts index ba2aa5e7d..8249d5121 100644 --- a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts +++ b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts @@ -18,7 +18,7 @@ class AWSBedrockEmbedding_Embeddings implements INode { constructor() { this.label = 'AWS Bedrock Embeddings' this.name = 'AWSBedrockEmbeddings' - this.version = 1.0 + this.version = 2.0 this.type = 'AWSBedrockEmbeddings' this.icon = 'awsBedrock.png' this.category = 'Embeddings' @@ -81,7 +81,9 @@ class AWSBedrockEmbedding_Embeddings implements INode { type: 'options', options: [ { label: 'amazon.titan-embed-text-v1', name: 'amazon.titan-embed-text-v1' }, - { label: 'amazon.titan-embed-g1-text-02', name: 'amazon.titan-embed-g1-text-02' } + { label: 'amazon.titan-embed-g1-text-02', name: 'amazon.titan-embed-g1-text-02' }, + { label: 'cohere.embed-english-v3', name: 'cohere.embed-english-v3' }, + { label: 'cohere.embed-multilingual-v3', name: 'cohere.embed-multilingual-v3' } ], default: 'amazon.titan-embed-text-v1' } diff --git a/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts b/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts index b67219f37..177a32ef9 100644 --- a/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts 
+++ b/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts @@ -27,7 +27,7 @@ class AWSBedrock_LLMs implements INode { constructor() { this.label = 'AWS Bedrock' this.name = 'awsBedrock' - this.version = 1.2 + this.version = 2.0 this.type = 'AWSBedrock' this.icon = 'awsBedrock.png' this.category = 'LLMs' @@ -98,6 +98,7 @@ class AWSBedrock_LLMs implements INode { { label: 'amazon.titan-tg1-large', name: 'amazon.titan-tg1-large' }, { label: 'amazon.titan-e1t-medium', name: 'amazon.titan-e1t-medium' }, { label: 'cohere.command-text-v14', name: 'cohere.command-text-v14' }, + { label: 'cohere.command-light-text-v14', name: 'cohere.command-light-text-v14' }, { label: 'ai21.j2-grande-instruct', name: 'ai21.j2-grande-instruct' }, { label: 'ai21.j2-jumbo-instruct', name: 'ai21.j2-jumbo-instruct' }, { label: 'ai21.j2-mid', name: 'ai21.j2-mid' }, diff --git a/packages/components/nodes/memory/DynamoDb/DynamoDb.ts b/packages/components/nodes/memory/DynamoDb/DynamoDb.ts index 68b09b7b2..8ca6cf9e5 100644 --- a/packages/components/nodes/memory/DynamoDb/DynamoDb.ts +++ b/packages/components/nodes/memory/DynamoDb/DynamoDb.ts @@ -1,4 +1,13 @@ -import { ICommonObject, INode, INodeData, INodeParams, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src' +import { + ICommonObject, + INode, + INodeData, + INodeParams, + getBaseClasses, + getCredentialData, + getCredentialParam, + serializeChatHistory +} from '../../../src' import { DynamoDBChatMessageHistory } from 'langchain/stores/message/dynamodb' import { BufferMemory, BufferMemoryInput } from 'langchain/memory' @@ -70,13 +79,23 @@ class DynamoDb_Memory implements INode { return initalizeDynamoDB(nodeData, options) } - async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { - const dynamodbMemory = await initalizeDynamoDB(nodeData, options) - const sessionId = nodeData.inputs?.sessionId as string - const chatId = options?.chatId as string - options.logger.info(`Clearing DynamoDb memory 
session ${sessionId ? sessionId : chatId}`) - await dynamodbMemory.clear() - options.logger.info(`Successfully cleared DynamoDb memory session ${sessionId ? sessionId : chatId}`) + //@ts-ignore + memoryMethods = { + async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { + const dynamodbMemory = await initalizeDynamoDB(nodeData, options) + const sessionId = nodeData.inputs?.sessionId as string + const chatId = options?.chatId as string + options.logger.info(`Clearing DynamoDb memory session ${sessionId ? sessionId : chatId}`) + await dynamodbMemory.clear() + options.logger.info(`Successfully cleared DynamoDb memory session ${sessionId ? sessionId : chatId}`) + }, + async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise { + const memoryKey = nodeData.inputs?.memoryKey as string + const dynamodbMemory = await initalizeDynamoDB(nodeData, options) + const key = memoryKey ?? 'chat_history' + const memoryResult = await dynamodbMemory.loadMemoryVariables({}) + return serializeChatHistory(memoryResult[key]) + } } } @@ -109,9 +128,8 @@ const initalizeDynamoDB = async (nodeData: INodeData, options: ICommonObject): P }) const memory = new BufferMemoryExtended({ - memoryKey, + memoryKey: memoryKey ?? 
'chat_history', chatHistory: dynamoDb, - returnMessages: true, isSessionIdUsingChatMessageId }) return memory diff --git a/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts b/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts index 7de2ec347..76cb7e313 100644 --- a/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts +++ b/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts @@ -1,4 +1,13 @@ -import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src' +import { + getBaseClasses, + getCredentialData, + getCredentialParam, + ICommonObject, + INode, + INodeData, + INodeParams, + serializeChatHistory +} from '../../../src' import { MongoDBChatMessageHistory } from 'langchain/stores/message/mongodb' import { BufferMemory, BufferMemoryInput } from 'langchain/memory' import { BaseMessage, mapStoredMessageToChatMessage } from 'langchain/schema' @@ -67,13 +76,23 @@ class MongoDB_Memory implements INode { return initializeMongoDB(nodeData, options) } - async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { - const mongodbMemory = await initializeMongoDB(nodeData, options) - const sessionId = nodeData.inputs?.sessionId as string - const chatId = options?.chatId as string - options.logger.info(`Clearing MongoDB memory session ${sessionId ? sessionId : chatId}`) - await mongodbMemory.clear() - options.logger.info(`Successfully cleared MongoDB memory session ${sessionId ? sessionId : chatId}`) + //@ts-ignore + memoryMethods = { + async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { + const mongodbMemory = await initializeMongoDB(nodeData, options) + const sessionId = nodeData.inputs?.sessionId as string + const chatId = options?.chatId as string + options.logger.info(`Clearing MongoDB memory session ${sessionId ? 
sessionId : chatId}`) + await mongodbMemory.clear() + options.logger.info(`Successfully cleared MongoDB memory session ${sessionId ? sessionId : chatId}`) + }, + async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise { + const memoryKey = nodeData.inputs?.memoryKey as string + const mongodbMemory = await initializeMongoDB(nodeData, options) + const key = memoryKey ?? 'chat_history' + const memoryResult = await mongodbMemory.loadMemoryVariables({}) + return serializeChatHistory(memoryResult[key]) + } } } @@ -123,9 +142,8 @@ const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): P } return new BufferMemoryExtended({ - memoryKey, + memoryKey: memoryKey ?? 'chat_history', chatHistory: mongoDBChatMessageHistory, - returnMessages: true, isSessionIdUsingChatMessageId }) } diff --git a/packages/components/nodes/memory/MotorheadMemory/MotorheadMemory.ts b/packages/components/nodes/memory/MotorheadMemory/MotorheadMemory.ts index 0ec2f42ad..9cdbcd5cc 100644 --- a/packages/components/nodes/memory/MotorheadMemory/MotorheadMemory.ts +++ b/packages/components/nodes/memory/MotorheadMemory/MotorheadMemory.ts @@ -3,6 +3,7 @@ import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../ import { ICommonObject } from '../../../src' import { MotorheadMemory, MotorheadMemoryInput } from 'langchain/memory' import fetch from 'node-fetch' +import { getBufferString } from 'langchain/memory' class MotorMemory_Memory implements INode { label: string @@ -64,13 +65,23 @@ class MotorMemory_Memory implements INode { return initalizeMotorhead(nodeData, options) } - async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { - const motorhead = await initalizeMotorhead(nodeData, options) - const sessionId = nodeData.inputs?.sessionId as string - const chatId = options?.chatId as string - options.logger.info(`Clearing Motorhead memory session ${sessionId ? 
sessionId : chatId}`) - await motorhead.clear() - options.logger.info(`Successfully cleared Motorhead memory session ${sessionId ? sessionId : chatId}`) + //@ts-ignore + memoryMethods = { + async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { + const motorhead = await initalizeMotorhead(nodeData, options) + const sessionId = nodeData.inputs?.sessionId as string + const chatId = options?.chatId as string + options.logger.info(`Clearing Motorhead memory session ${sessionId ? sessionId : chatId}`) + await motorhead.clear() + options.logger.info(`Successfully cleared Motorhead memory session ${sessionId ? sessionId : chatId}`) + }, + async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise { + const memoryKey = nodeData.inputs?.memoryKey as string + const motorhead = await initalizeMotorhead(nodeData, options) + const key = memoryKey ?? 'chat_history' + const memoryResult = await motorhead.loadMemoryVariables({}) + return getBufferString(memoryResult[key]) + } } } diff --git a/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts b/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts index c65d729b2..7fe447ad5 100644 --- a/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts +++ b/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts @@ -1,5 +1,5 @@ import { INode, INodeData, INodeParams, ICommonObject } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, serializeChatHistory } from '../../../src/utils' import { BufferMemory, BufferMemoryInput } from 'langchain/memory' import { RedisChatMessageHistory, RedisChatMessageHistoryInput } from 'langchain/stores/message/ioredis' import { mapStoredMessageToChatMessage, BaseMessage } from 'langchain/schema' @@ -65,13 +65,23 @@ class 
RedisBackedChatMemory_Memory implements INode { return await initalizeRedis(nodeData, options) } - async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { - const redis = await initalizeRedis(nodeData, options) - const sessionId = nodeData.inputs?.sessionId as string - const chatId = options?.chatId as string - options.logger.info(`Clearing Redis memory session ${sessionId ? sessionId : chatId}`) - await redis.clear() - options.logger.info(`Successfully cleared Redis memory session ${sessionId ? sessionId : chatId}`) + //@ts-ignore + memoryMethods = { + async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { + const redis = await initalizeRedis(nodeData, options) + const sessionId = nodeData.inputs?.sessionId as string + const chatId = options?.chatId as string + options.logger.info(`Clearing Redis memory session ${sessionId ? sessionId : chatId}`) + await redis.clear() + options.logger.info(`Successfully cleared Redis memory session ${sessionId ? sessionId : chatId}`) + }, + async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise { + const memoryKey = nodeData.inputs?.memoryKey as string + const redis = await initalizeRedis(nodeData, options) + const key = memoryKey ?? 'chat_history' + const memoryResult = await redis.loadMemoryVariables({}) + return serializeChatHistory(memoryResult[key]) + } } } @@ -137,7 +147,7 @@ const initalizeRedis = async (nodeData: INodeData, options: ICommonObject): Prom } const memory = new BufferMemoryExtended({ - memoryKey, + memoryKey: memoryKey ?? 
'chat_history', chatHistory: redisChatMessageHistory, isSessionIdUsingChatMessageId }) diff --git a/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts b/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts index 6b5fdf660..8bca04404 100644 --- a/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts +++ b/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts @@ -1,5 +1,5 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, serializeChatHistory } from '../../../src/utils' import { ICommonObject } from '../../../src' import { BufferMemory, BufferMemoryInput } from 'langchain/memory' import { UpstashRedisChatMessageHistory } from 'langchain/stores/message/upstash_redis' @@ -63,13 +63,22 @@ class UpstashRedisBackedChatMemory_Memory implements INode { return initalizeUpstashRedis(nodeData, options) } - async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { - const redis = await initalizeUpstashRedis(nodeData, options) - const sessionId = nodeData.inputs?.sessionId as string - const chatId = options?.chatId as string - options.logger.info(`Clearing Upstash Redis memory session ${sessionId ? sessionId : chatId}`) - await redis.clear() - options.logger.info(`Successfully cleared Upstash Redis memory session ${sessionId ? sessionId : chatId}`) + //@ts-ignore + memoryMethods = { + async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { + const redis = await initalizeUpstashRedis(nodeData, options) + const sessionId = nodeData.inputs?.sessionId as string + const chatId = options?.chatId as string + options.logger.info(`Clearing Upstash Redis memory session ${sessionId ? 
sessionId : chatId}`) + await redis.clear() + options.logger.info(`Successfully cleared Upstash Redis memory session ${sessionId ? sessionId : chatId}`) + }, + async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise { + const redis = await initalizeUpstashRedis(nodeData, options) + const key = 'chat_history' + const memoryResult = await redis.loadMemoryVariables({}) + return serializeChatHistory(memoryResult[key]) + } } } @@ -95,6 +104,7 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject }) const memory = new BufferMemoryExtended({ + memoryKey: 'chat_history', chatHistory: redisChatMessageHistory, isSessionIdUsingChatMessageId }) diff --git a/packages/components/nodes/memory/ZepMemory/ZepMemory.ts b/packages/components/nodes/memory/ZepMemory/ZepMemory.ts index c44986444..e72a6704f 100644 --- a/packages/components/nodes/memory/ZepMemory/ZepMemory.ts +++ b/packages/components/nodes/memory/ZepMemory/ZepMemory.ts @@ -1,7 +1,7 @@ -import { SystemMessage } from 'langchain/schema' +import { ZepMemory, ZepMemoryInput } from 'langchain/memory/zep' +import { getBufferString, InputValues, MemoryVariables, OutputValues } from 'langchain/memory' import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' -import { ZepMemory, ZepMemoryInput } from 'langchain/memory/zep' import { ICommonObject } from '../../../src' class ZepMemory_Memory implements INode { @@ -19,7 +19,7 @@ class ZepMemory_Memory implements INode { constructor() { this.label = 'Zep Memory' this.name = 'ZepMemory' - this.version = 1.0 + this.version = 2.0 this.type = 'ZepMemory' this.icon = 'zep.png' this.category = 'Memory' @@ -40,17 +40,12 @@ class ZepMemory_Memory implements INode { type: 'string', default: 'http://127.0.0.1:8000' }, - { - label: 'Auto Summary', - name: 'autoSummary', - type: 'boolean', - default: true - }, { label: 'Session Id', name: 
'sessionId', type: 'string', - description: 'If not specified, the first CHAT_MESSAGE_ID will be used as sessionId', + description: + 'If not specified, a random id will be used. Learn more', default: '', additionalParams: true, optional: true @@ -59,15 +54,10 @@ class ZepMemory_Memory implements INode { label: 'Size', name: 'k', type: 'number', - default: '10', - description: 'Window of size k to surface the last k back-and-forth to use as memory.' - }, - { - label: 'Auto Summary Template', - name: 'autoSummaryTemplate', - type: 'string', - default: 'This is the summary of the following conversation:\n{summary}', - additionalParams: true + placeholder: '10', + description: 'Window of size k to surface the last k back-and-forth to use as memory.', + additionalParams: true, + optional: true }, { label: 'AI Prefix', @@ -108,45 +98,28 @@ class ZepMemory_Memory implements INode { } async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - const autoSummaryTemplate = nodeData.inputs?.autoSummaryTemplate as string - const autoSummary = nodeData.inputs?.autoSummary as boolean - - const k = nodeData.inputs?.k as string - - let zep = await initalizeZep(nodeData, options) - - // hack to support summary - let tmpFunc = zep.loadMemoryVariables - zep.loadMemoryVariables = async (values) => { - let data = await tmpFunc.bind(zep, values)() - if (autoSummary && zep.returnMessages && data[zep.memoryKey] && data[zep.memoryKey].length) { - const zepClient = await zep.zepClientPromise - const memory = await zepClient.memory.getMemory(zep.sessionId, parseInt(k, 10) ?? 
10) - if (memory?.summary) { - let summary = autoSummaryTemplate.replace(/{summary}/g, memory.summary.content) - // eslint-disable-next-line no-console - console.log('[ZepMemory] auto summary:', summary) - data[zep.memoryKey].unshift(new SystemMessage(summary)) - } - } - // for langchain zep memory compatibility, or we will get "Missing value for input variable chat_history" - if (data instanceof Array) { - data = { - [zep.memoryKey]: data - } - } - return data - } - return zep + return await initalizeZep(nodeData, options) } - async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { - const zep = await initalizeZep(nodeData, options) - const sessionId = nodeData.inputs?.sessionId as string - const chatId = options?.chatId as string - options.logger.info(`Clearing Zep memory session ${sessionId ? sessionId : chatId}`) - await zep.clear() - options.logger.info(`Successfully cleared Zep memory session ${sessionId ? sessionId : chatId}`) + //@ts-ignore + memoryMethods = { + async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise { + const zep = await initalizeZep(nodeData, options) + const sessionId = nodeData.inputs?.sessionId as string + const chatId = options?.chatId as string + options.logger.info(`Clearing Zep memory session ${sessionId ? sessionId : chatId}`) + await zep.clear() + options.logger.info(`Successfully cleared Zep memory session ${sessionId ? sessionId : chatId}`) + }, + async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise { + const memoryKey = nodeData.inputs?.memoryKey as string + const aiPrefix = nodeData.inputs?.aiPrefix as string + const humanPrefix = nodeData.inputs?.humanPrefix as string + const zep = await initalizeZep(nodeData, options) + const key = memoryKey ?? 
'chat_history' + const memoryResult = await zep.loadMemoryVariables({}) + return getBufferString(memoryResult[key], humanPrefix, aiPrefix) + } } } @@ -156,40 +129,72 @@ const initalizeZep = async (nodeData: INodeData, options: ICommonObject): Promis const humanPrefix = nodeData.inputs?.humanPrefix as string const memoryKey = nodeData.inputs?.memoryKey as string const inputKey = nodeData.inputs?.inputKey as string - const sessionId = nodeData.inputs?.sessionId as string + const k = nodeData.inputs?.k as string const chatId = options?.chatId as string let isSessionIdUsingChatMessageId = false - if (!sessionId && chatId) isSessionIdUsingChatMessageId = true + let sessionId = '' + + if (!nodeData.inputs?.sessionId && chatId) { + isSessionIdUsingChatMessageId = true + sessionId = chatId + } else { + sessionId = nodeData.inputs?.sessionId + } const credentialData = await getCredentialData(nodeData.credential ?? '', options) const apiKey = getCredentialParam('apiKey', credentialData, nodeData) - const obj: ZepMemoryInput & Partial = { + const obj: ZepMemoryInput & ZepMemoryExtendedInput = { baseURL, sessionId: sessionId ? sessionId : chatId, aiPrefix, humanPrefix, returnMessages: true, memoryKey, - inputKey + inputKey, + isSessionIdUsingChatMessageId, + k: k ? parseInt(k, 10) : undefined } if (apiKey) obj.apiKey = apiKey - if (isSessionIdUsingChatMessageId) obj.isSessionIdUsingChatMessageId = true return new ZepMemoryExtended(obj) } interface ZepMemoryExtendedInput { isSessionIdUsingChatMessageId: boolean + k?: number } class ZepMemoryExtended extends ZepMemory { isSessionIdUsingChatMessageId? 
= false + lastN?: number - constructor(fields: ZepMemoryInput & Partial) { + constructor(fields: ZepMemoryInput & ZepMemoryExtendedInput) { super(fields) this.isSessionIdUsingChatMessageId = fields.isSessionIdUsingChatMessageId + this.lastN = fields.k + } + + async loadMemoryVariables(values: InputValues, overrideSessionId = ''): Promise { + if (overrideSessionId) { + super.sessionId = overrideSessionId + } + return super.loadMemoryVariables({ ...values, lastN: this.lastN }) + } + + async saveContext(inputValues: InputValues, outputValues: OutputValues, overrideSessionId = ''): Promise { + if (overrideSessionId) { + super.sessionId = overrideSessionId + } + return super.saveContext(inputValues, outputValues) + } + + async clear(overrideSessionId = ''): Promise { + if (overrideSessionId) { + super.sessionId = overrideSessionId + } + return super.clear() } } diff --git a/packages/components/nodes/moderation/Moderation.ts b/packages/components/nodes/moderation/Moderation.ts index 9c40f55ab..9fd2bfde3 100644 --- a/packages/components/nodes/moderation/Moderation.ts +++ b/packages/components/nodes/moderation/Moderation.ts @@ -1,13 +1,12 @@ -import { BaseLanguageModel } from 'langchain/base_language' import { Server } from 'socket.io' export abstract class Moderation { - abstract checkForViolations(llm: BaseLanguageModel, input: string): Promise + abstract checkForViolations(input: string): Promise } -export const checkInputs = async (inputModerations: Moderation[], llm: BaseLanguageModel, input: string): Promise => { +export const checkInputs = async (inputModerations: Moderation[], input: string): Promise => { for (const moderation of inputModerations) { - input = await moderation.checkForViolations(llm, input) + input = await moderation.checkForViolations(input) } return input } diff --git a/packages/components/nodes/moderation/OpenAIModeration/OpenAIModeration.ts b/packages/components/nodes/moderation/OpenAIModeration/OpenAIModeration.ts index 5233f174f..85b279070 
100644 --- a/packages/components/nodes/moderation/OpenAIModeration/OpenAIModeration.ts +++ b/packages/components/nodes/moderation/OpenAIModeration/OpenAIModeration.ts @@ -1,5 +1,5 @@ -import { INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses } from '../../../src' +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src' import { Moderation } from '../Moderation' import { OpenAIModerationRunner } from './OpenAIModerationRunner' @@ -12,6 +12,7 @@ class OpenAIModeration implements INode { icon: string category: string baseClasses: string[] + credential: INodeParams inputs: INodeParams[] constructor() { @@ -19,10 +20,16 @@ class OpenAIModeration implements INode { this.name = 'inputModerationOpenAI' this.version = 1.0 this.type = 'Moderation' - this.icon = 'openai-moderation.png' + this.icon = 'openai.png' this.category = 'Moderation' this.description = 'Check whether content complies with OpenAI usage policies.' this.baseClasses = [this.type, ...getBaseClasses(Moderation)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['openAIApi'] + } this.inputs = [ { label: 'Error Message', @@ -35,8 +42,11 @@ class OpenAIModeration implements INode { ] } - async init(nodeData: INodeData): Promise { - const runner = new OpenAIModerationRunner() + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData) + + const runner = new OpenAIModerationRunner(openAIApiKey) const moderationErrorMessage = nodeData.inputs?.moderationErrorMessage as string if (moderationErrorMessage) runner.setErrorMessage(moderationErrorMessage) return runner diff --git a/packages/components/nodes/moderation/OpenAIModeration/OpenAIModerationRunner.ts b/packages/components/nodes/moderation/OpenAIModeration/OpenAIModerationRunner.ts index c517f419a..3a3ec5502 100644 --- a/packages/components/nodes/moderation/OpenAIModeration/OpenAIModerationRunner.ts +++ b/packages/components/nodes/moderation/OpenAIModeration/OpenAIModerationRunner.ts @@ -1,18 +1,21 @@ import { Moderation } from '../Moderation' -import { BaseLanguageModel } from 'langchain/base_language' import { OpenAIModerationChain } from 'langchain/chains' export class OpenAIModerationRunner implements Moderation { + private openAIApiKey = '' private moderationErrorMessage: string = "Text was found that violates OpenAI's content policy." - async checkForViolations(llm: BaseLanguageModel, input: string): Promise { - const openAIApiKey = (llm as any).openAIApiKey - if (!openAIApiKey) { + constructor(openAIApiKey: string) { + this.openAIApiKey = openAIApiKey + } + + async checkForViolations(input: string): Promise { + if (!this.openAIApiKey) { throw Error('OpenAI API key not found') } // Create a new instance of the OpenAIModerationChain const moderation = new OpenAIModerationChain({ - openAIApiKey: openAIApiKey, + openAIApiKey: this.openAIApiKey, throwError: false // If set to true, the call will throw an error when the moderation chain detects violating content. If set to false, violating content will return "Text was found that violates OpenAI's content policy.". 
}) // Send the user's input to the moderation chain and wait for the result diff --git a/packages/components/nodes/moderation/OpenAIModeration/openai-moderation.png b/packages/components/nodes/moderation/OpenAIModeration/openai-moderation.png deleted file mode 100644 index e3b1b282a..000000000 Binary files a/packages/components/nodes/moderation/OpenAIModeration/openai-moderation.png and /dev/null differ diff --git a/packages/components/nodes/moderation/OpenAIModeration/openai.png b/packages/components/nodes/moderation/OpenAIModeration/openai.png new file mode 100644 index 000000000..de08a05b2 Binary files /dev/null and b/packages/components/nodes/moderation/OpenAIModeration/openai.png differ diff --git a/packages/components/nodes/moderation/SimplePromptModeration/SimplePromptModerationRunner.ts b/packages/components/nodes/moderation/SimplePromptModeration/SimplePromptModerationRunner.ts index 7fc251ad4..08f9ed1ed 100644 --- a/packages/components/nodes/moderation/SimplePromptModeration/SimplePromptModerationRunner.ts +++ b/packages/components/nodes/moderation/SimplePromptModeration/SimplePromptModerationRunner.ts @@ -1,5 +1,4 @@ import { Moderation } from '../Moderation' -import { BaseLanguageModel } from 'langchain/base_language' export class SimplePromptModerationRunner implements Moderation { private readonly denyList: string = '' @@ -13,9 +12,9 @@ export class SimplePromptModerationRunner implements Moderation { this.moderationErrorMessage = moderationErrorMessage } - async checkForViolations(_: BaseLanguageModel, input: string): Promise { + async checkForViolations(input: string): Promise { this.denyList.split('\n').forEach((denyListItem) => { - if (denyListItem && denyListItem !== '' && input.includes(denyListItem)) { + if (denyListItem && denyListItem !== '' && input.toLowerCase().includes(denyListItem.toLowerCase())) { throw Error(this.moderationErrorMessage) } }) diff --git a/packages/components/nodes/outputparsers/OutputParserHelpers.ts 
b/packages/components/nodes/outputparsers/OutputParserHelpers.ts index a94edddd3..8ea77e6bf 100644 --- a/packages/components/nodes/outputparsers/OutputParserHelpers.ts +++ b/packages/components/nodes/outputparsers/OutputParserHelpers.ts @@ -1,6 +1,6 @@ import { BaseOutputParser } from 'langchain/schema/output_parser' import { LLMChain } from 'langchain/chains' -import { BaseLanguageModel } from 'langchain/base_language' +import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language' import { ICommonObject } from '../../src' import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts' @@ -15,7 +15,7 @@ export const formatResponse = (response: string | object): string | object => { export const injectOutputParser = ( outputParser: BaseOutputParser, - chain: LLMChain, + chain: LLMChain>, promptValues: ICommonObject | undefined = undefined ) => { if (outputParser && chain.prompt) { diff --git a/packages/components/nodes/retrievers/HydeRetriever/HydeRetriever.ts b/packages/components/nodes/retrievers/HydeRetriever/HydeRetriever.ts index 9ec7ada0c..10d9a6e7a 100644 --- a/packages/components/nodes/retrievers/HydeRetriever/HydeRetriever.ts +++ b/packages/components/nodes/retrievers/HydeRetriever/HydeRetriever.ts @@ -18,7 +18,7 @@ class HydeRetriever_Retrievers implements INode { constructor() { this.label = 'Hyde Retriever' this.name = 'HydeRetriever' - this.version = 1.0 + this.version = 2.0 this.type = 'HydeRetriever' this.icon = 'hyderetriever.svg' this.category = 'Retrievers' @@ -36,41 +36,66 @@ class HydeRetriever_Retrievers implements INode { type: 'VectorStore' }, { - label: 'Prompt Key', + label: 'Select Defined Prompt', name: 'promptKey', + description: 'Select a pre-defined prompt', type: 'options', options: [ { label: 'websearch', - name: 'websearch' + name: 'websearch', + description: `Please write a passage to answer the question +Question: {question} +Passage:` }, { 
label: 'scifact', - name: 'scifact' + name: 'scifact', + description: `Please write a scientific paper passage to support/refute the claim +Claim: {question} +Passage:` }, { label: 'arguana', - name: 'arguana' + name: 'arguana', + description: `Please write a counter argument for the passage +Passage: {question} +Counter Argument:` }, { label: 'trec-covid', - name: 'trec-covid' + name: 'trec-covid', + description: `Please write a scientific paper passage to answer the question +Question: {question} +Passage:` }, { label: 'fiqa', - name: 'fiqa' + name: 'fiqa', + description: `Please write a financial article passage to answer the question +Question: {question} +Passage:` }, { label: 'dbpedia-entity', - name: 'dbpedia-entity' + name: 'dbpedia-entity', + description: `Please write a passage to answer the question. +Question: {question} +Passage:` }, { label: 'trec-news', - name: 'trec-news' + name: 'trec-news', + description: `Please write a news passage about the topic. +Topic: {question} +Passage:` }, { label: 'mr-tydi', - name: 'mr-tydi' + name: 'mr-tydi', + description: `Please write a passage in Swahili/Korean/Japanese/Bengali to answer the question in detail. 
+Question: {question} +Passage:` } ], default: 'websearch' @@ -78,7 +103,7 @@ class HydeRetriever_Retrievers implements INode { { label: 'Custom Prompt', name: 'customPrompt', - description: 'If custom prompt is used, this will override Prompt Key', + description: 'If custom prompt is used, this will override Defined Prompt', placeholder: 'Please write a passage to answer the question\nQuestion: {question}\nPassage:', type: 'string', rows: 4, diff --git a/packages/components/nodes/tools/ChainTool/core.ts b/packages/components/nodes/tools/ChainTool/core.ts index 6c3dba554..5520d6df0 100644 --- a/packages/components/nodes/tools/ChainTool/core.ts +++ b/packages/components/nodes/tools/ChainTool/core.ts @@ -1,5 +1,6 @@ import { DynamicTool, DynamicToolInput } from 'langchain/tools' import { BaseChain } from 'langchain/chains' +import { handleEscapeCharacters } from '../../../src/utils' export interface ChainToolInput extends Omit { chain: BaseChain @@ -14,7 +15,8 @@ export class ChainTool extends DynamicTool { func: async (input, runManager) => { // To enable LLM Chain which has promptValues if ((chain as any).prompt && (chain as any).prompt.promptValues) { - const values = await chain.call((chain as any).prompt.promptValues, runManager?.getChild()) + const promptValues = handleEscapeCharacters((chain as any).prompt.promptValues, true) + const values = await chain.call(promptValues, runManager?.getChild()) return values?.text } return chain.run(input, runManager?.getChild()) diff --git a/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts b/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts deleted file mode 100644 index 31ac989b5..000000000 --- a/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { ZapierNLAWrapper, ZapierNLAWrapperParams } from 'langchain/tools' -import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' -import { ZapierToolKit } from 'langchain/agents' -import { 
getCredentialData, getCredentialParam } from '../../../src' - -class ZapierNLA_Tools implements INode { - label: string - name: string - version: number - description: string - type: string - icon: string - category: string - badge: string - baseClasses: string[] - inputs: INodeParams[] - credential: INodeParams - - constructor() { - this.label = 'Zapier NLA' - this.name = 'zapierNLA' - this.version = 1.0 - this.type = 'ZapierNLA' - this.icon = 'zapier.svg' - this.category = 'Tools' - this.description = "Access to apps and actions on Zapier's platform through a natural language API interface" - this.badge = 'DEPRECATING' - this.inputs = [] - this.credential = { - label: 'Connect Credential', - name: 'credential', - type: 'credential', - credentialNames: ['zapierNLAApi'] - } - this.baseClasses = [this.type, 'Tool'] - } - - async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - const credentialData = await getCredentialData(nodeData.credential ?? '', options) - const zapierNLAApiKey = getCredentialParam('zapierNLAApiKey', credentialData, nodeData) - - const obj: Partial = { - apiKey: zapierNLAApiKey - } - const zapier = new ZapierNLAWrapper(obj) - const toolkit = await ZapierToolKit.fromZapierNLAWrapper(zapier) - - return toolkit.tools - } -} - -module.exports = { nodeClass: ZapierNLA_Tools } diff --git a/packages/components/nodes/tools/ZapierNLA/zapier.svg b/packages/components/nodes/tools/ZapierNLA/zapier.svg deleted file mode 100644 index 6ed35f295..000000000 --- a/packages/components/nodes/tools/ZapierNLA/zapier.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts b/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts index 51394613e..620c3af7f 100644 --- a/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts +++ b/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts @@ -31,7 
+31,8 @@ class InMemoryVectorStore_VectorStores implements INode { label: 'Document', name: 'document', type: 'Document', - list: true + list: true, + optional: true }, { label: 'Embeddings', diff --git a/packages/components/package.json b/packages/components/package.json index 1d4cea573..1874ca104 100644 --- a/packages/components/package.json +++ b/packages/components/package.json @@ -1,6 +1,6 @@ { "name": "flowise-components", - "version": "1.4.2", + "version": "1.4.6", "description": "Flowiseai Components", "main": "dist/src/index", "types": "dist/src/index.d.ts", @@ -21,8 +21,9 @@ "@aws-sdk/client-s3": "^3.427.0", "@dqbd/tiktoken": "^1.0.7", "@elastic/elasticsearch": "^8.9.0", - "@getzep/zep-js": "^0.6.3", - "@gomomento/sdk": "^1.40.2", + "@getzep/zep-js": "^0.9.0", + "@gomomento/sdk": "^1.51.1", + "@gomomento/sdk-core": "^1.51.1", "@google-ai/generativelanguage": "^0.2.1", "@huggingface/inference": "^2.6.1", "@notionhq/client": "^2.2.8", @@ -35,7 +36,7 @@ "@upstash/redis": "^1.22.1", "@zilliz/milvus2-sdk-node": "^2.2.24", "apify-client": "^2.7.1", - "axios": "^0.27.2", + "axios": "1.6.2", "cheerio": "^1.0.0-rc.12", "chromadb": "^1.5.11", "cohere-ai": "^6.2.0", @@ -49,9 +50,10 @@ "html-to-text": "^9.0.5", "husky": "^8.0.3", "ioredis": "^5.3.2", - "langchain": "^0.0.165", + "langchain": "^0.0.196", + "langfuse": "^1.2.0", "langfuse-langchain": "^1.0.31", - "langsmith": "^0.0.32", + "langsmith": "^0.0.49", "linkifyjs": "^4.1.1", "llmonitor": "^0.5.5", "mammoth": "^1.5.1", diff --git a/packages/components/src/Interface.ts b/packages/components/src/Interface.ts index af3042720..6752f9440 100644 --- a/packages/components/src/Interface.ts +++ b/packages/components/src/Interface.ts @@ -107,9 +107,12 @@ export interface INode extends INodeProperties { search: (nodeData: INodeData, options?: ICommonObject) => Promise delete: (nodeData: INodeData, options?: ICommonObject) => Promise } + memoryMethods?: { + clearSessionMemory: (nodeData: INodeData, options?: 
ICommonObject) => Promise + getChatMessages: (nodeData: INodeData, options?: ICommonObject) => Promise + } init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise - clearSessionMemory?(nodeData: INodeData, options?: ICommonObject): Promise } export interface INodeData extends INodeProperties { diff --git a/packages/components/src/handler.ts b/packages/components/src/handler.ts index 456cf39c3..29aff3e2f 100644 --- a/packages/components/src/handler.ts +++ b/packages/components/src/handler.ts @@ -8,6 +8,10 @@ import { LLMonitorHandler } from 'langchain/callbacks/handlers/llmonitor' import { getCredentialData, getCredentialParam } from './utils' import { ICommonObject, INodeData } from './Interface' import CallbackHandler from 'langfuse-langchain' +import { RunTree, RunTreeConfig, Client as LangsmithClient } from 'langsmith' +import { Langfuse, LangfuseTraceClient, LangfuseSpanClient, LangfuseGenerationClient } from 'langfuse' +import monitor from 'llmonitor' +import { v4 as uuidv4 } from 'uuid' interface AgentRun extends Run { actions: AgentAction[] @@ -273,3 +277,488 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO throw new Error(e) } } + +export class AnalyticHandler { + nodeData: INodeData + options: ICommonObject = {} + handlers: ICommonObject = {} + + constructor(nodeData: INodeData, options: ICommonObject) { + this.options = options + this.nodeData = nodeData + this.init() + } + + async init() { + try { + if (!this.options.analytic) return + + const analytic = JSON.parse(this.options.analytic) + + for (const provider in analytic) { + const providerStatus = analytic[provider].status as boolean + + if (providerStatus) { + const credentialId = analytic[provider].credentialId as string + const credentialData = await getCredentialData(credentialId ?? 
'', this.options) + if (provider === 'langSmith') { + const langSmithProject = analytic[provider].projectName as string + const langSmithApiKey = getCredentialParam('langSmithApiKey', credentialData, this.nodeData) + const langSmithEndpoint = getCredentialParam('langSmithEndpoint', credentialData, this.nodeData) + + const client = new LangsmithClient({ + apiUrl: langSmithEndpoint ?? 'https://api.smith.langchain.com', + apiKey: langSmithApiKey + }) + + this.handlers['langSmith'] = { client, langSmithProject } + } else if (provider === 'langFuse') { + const release = analytic[provider].release as string + const langFuseSecretKey = getCredentialParam('langFuseSecretKey', credentialData, this.nodeData) + const langFusePublicKey = getCredentialParam('langFusePublicKey', credentialData, this.nodeData) + const langFuseEndpoint = getCredentialParam('langFuseEndpoint', credentialData, this.nodeData) + + const langfuse = new Langfuse({ + secretKey: langFuseSecretKey, + publicKey: langFusePublicKey, + baseUrl: langFuseEndpoint ?? 
'https://cloud.langfuse.com', + release + }) + this.handlers['langFuse'] = { client: langfuse } + } else if (provider === 'llmonitor') { + const llmonitorAppId = getCredentialParam('llmonitorAppId', credentialData, this.nodeData) + const llmonitorEndpoint = getCredentialParam('llmonitorEndpoint', credentialData, this.nodeData) + + monitor.init({ + appId: llmonitorAppId, + apiUrl: llmonitorEndpoint + }) + + this.handlers['llmonitor'] = { client: monitor } + } + } + } + } catch (e) { + throw new Error(e) + } + } + + async onChainStart(name: string, input: string, parentIds?: ICommonObject) { + const returnIds: ICommonObject = { + langSmith: {}, + langFuse: {}, + llmonitor: {} + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + if (!parentIds || !Object.keys(parentIds).length) { + const parentRunConfig: RunTreeConfig = { + name, + run_type: 'chain', + inputs: { + text: input + }, + serialized: {}, + project_name: this.handlers['langSmith'].langSmithProject, + client: this.handlers['langSmith'].client + } + const parentRun = new RunTree(parentRunConfig) + await parentRun.postRun() + this.handlers['langSmith'].chainRun = { [parentRun.id]: parentRun } + returnIds['langSmith'].chainRun = parentRun.id + } else { + const parentRun: RunTree | undefined = this.handlers['langSmith'].chainRun[parentIds['langSmith'].chainRun] + if (parentRun) { + const childChainRun = await parentRun.createChild({ + name, + run_type: 'chain', + inputs: { + text: input + } + }) + await childChainRun.postRun() + this.handlers['langSmith'].chainRun = { [childChainRun.id]: childChainRun } + returnIds['langSmith'].chainRun = childChainRun.id + } + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + let langfuseTraceClient: LangfuseTraceClient + + if (!parentIds || !Object.keys(parentIds).length) { + const langfuse: Langfuse = this.handlers['langFuse'].client + langfuseTraceClient = langfuse.trace({ + name, + userId: this.options.chatId, + 
metadata: { tags: ['openai-assistant'] } + }) + } else { + langfuseTraceClient = this.handlers['langFuse'].trace[parentIds['langFuse'].trace] + } + + if (langfuseTraceClient) { + const span = langfuseTraceClient.span({ + name, + input: { + text: input + } + }) + this.handlers['langFuse'].trace = { [langfuseTraceClient.id]: langfuseTraceClient } + this.handlers['langFuse'].span = { [span.id]: span } + returnIds['langFuse'].trace = langfuseTraceClient.id + returnIds['langFuse'].span = span.id + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const monitor = this.handlers['llmonitor'].client + + if (monitor) { + const runId = uuidv4() + await monitor.trackEvent('chain', 'start', { + runId, + name, + userId: this.options.chatId, + input + }) + this.handlers['llmonitor'].chainEvent = { [runId]: runId } + returnIds['llmonitor'].chainEvent = runId + } + } + + return returnIds + } + + async onChainEnd(returnIds: ICommonObject, output: string | object, shutdown = false) { + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + const chainRun: RunTree | undefined = this.handlers['langSmith'].chainRun[returnIds['langSmith'].chainRun] + if (chainRun) { + await chainRun.end({ + outputs: { + output + } + }) + await chainRun.patchRun() + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + const span: LangfuseSpanClient | undefined = this.handlers['langFuse'].span[returnIds['langFuse'].span] + if (span) { + span.end({ + output + }) + if (shutdown) { + const langfuse: Langfuse = this.handlers['langFuse'].client + await langfuse.shutdownAsync() + } + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const chainEventId = returnIds['llmonitor'].chainEvent + const monitor = this.handlers['llmonitor'].client + + if (monitor && chainEventId) { + await monitor.trackEvent('chain', 'end', { + runId: chainEventId, + output + }) + } + } + } + + async onChainError(returnIds: 
ICommonObject, error: string | object, shutdown = false) { + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + const chainRun: RunTree | undefined = this.handlers['langSmith'].chainRun[returnIds['langSmith'].chainRun] + if (chainRun) { + await chainRun.end({ + error: { + error + } + }) + await chainRun.patchRun() + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + const span: LangfuseSpanClient | undefined = this.handlers['langFuse'].span[returnIds['langFuse'].span] + if (span) { + span.end({ + output: { + error + } + }) + if (shutdown) { + const langfuse: Langfuse = this.handlers['langFuse'].client + await langfuse.shutdownAsync() + } + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const chainEventId = returnIds['llmonitor'].chainEvent + const monitor = this.handlers['llmonitor'].client + + if (monitor && chainEventId) { + await monitor.trackEvent('chain', 'end', { + runId: chainEventId, + output: error + }) + } + } + } + + async onLLMStart(name: string, input: string, parentIds: ICommonObject) { + const returnIds: ICommonObject = { + langSmith: {}, + langFuse: {}, + llmonitor: {} + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + const parentRun: RunTree | undefined = this.handlers['langSmith'].chainRun[parentIds['langSmith'].chainRun] + if (parentRun) { + const childLLMRun = await parentRun.createChild({ + name, + run_type: 'llm', + inputs: { + prompts: [input] + } + }) + await childLLMRun.postRun() + this.handlers['langSmith'].llmRun = { [childLLMRun.id]: childLLMRun } + returnIds['langSmith'].llmRun = childLLMRun.id + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + const trace: LangfuseTraceClient | undefined = this.handlers['langFuse'].trace[parentIds['langFuse'].trace] + if (trace) { + const generation = trace.generation({ + name, + prompt: input + }) + this.handlers['langFuse'].generation = { 
[generation.id]: generation } + returnIds['langFuse'].generation = generation.id + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const monitor = this.handlers['llmonitor'].client + const chainEventId: string = this.handlers['llmonitor'].chainEvent[parentIds['llmonitor'].chainEvent] + + if (monitor && chainEventId) { + const runId = uuidv4() + await monitor.trackEvent('llm', 'start', { + runId, + parentRunId: chainEventId, + name, + userId: this.options.chatId, + input + }) + this.handlers['llmonitor'].llmEvent = { [runId]: runId } + returnIds['llmonitor'].llmEvent = runId + } + } + + return returnIds + } + + async onLLMEnd(returnIds: ICommonObject, output: string) { + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + const llmRun: RunTree | undefined = this.handlers['langSmith'].llmRun[returnIds['langSmith'].llmRun] + if (llmRun) { + await llmRun.end({ + outputs: { + generations: [output] + } + }) + await llmRun.patchRun() + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + const generation: LangfuseGenerationClient | undefined = this.handlers['langFuse'].generation[returnIds['langFuse'].generation] + if (generation) { + generation.end({ + completion: output + }) + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const llmEventId: string = this.handlers['llmonitor'].llmEvent[returnIds['llmonitor'].llmEvent] + const monitor = this.handlers['llmonitor'].client + + if (monitor && llmEventId) { + await monitor.trackEvent('llm', 'end', { + runId: llmEventId, + output + }) + } + } + } + + async onLLMError(returnIds: ICommonObject, error: string | object) { + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + const llmRun: RunTree | undefined = this.handlers['langSmith'].llmRun[returnIds['langSmith'].llmRun] + if (llmRun) { + await llmRun.end({ + error: { + error + } + }) + await llmRun.patchRun() + } + } + + if 
(Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + const generation: LangfuseGenerationClient | undefined = this.handlers['langFuse'].generation[returnIds['langFuse'].generation] + if (generation) { + generation.end({ + completion: error + }) + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const llmEventId: string = this.handlers['llmonitor'].llmEvent[returnIds['llmonitor'].llmEvent] + const monitor = this.handlers['llmonitor'].client + + if (monitor && llmEventId) { + await monitor.trackEvent('llm', 'end', { + runId: llmEventId, + output: error + }) + } + } + } + + async onToolStart(name: string, input: string | object, parentIds: ICommonObject) { + const returnIds: ICommonObject = { + langSmith: {}, + langFuse: {}, + llmonitor: {} + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + const parentRun: RunTree | undefined = this.handlers['langSmith'].chainRun[parentIds['langSmith'].chainRun] + if (parentRun) { + const childToolRun = await parentRun.createChild({ + name, + run_type: 'tool', + inputs: { + input + } + }) + await childToolRun.postRun() + this.handlers['langSmith'].toolRun = { [childToolRun.id]: childToolRun } + returnIds['langSmith'].toolRun = childToolRun.id + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + const trace: LangfuseTraceClient | undefined = this.handlers['langFuse'].trace[parentIds['langFuse'].trace] + if (trace) { + const toolSpan = trace.span({ + name, + input + }) + this.handlers['langFuse'].toolSpan = { [toolSpan.id]: toolSpan } + returnIds['langFuse'].toolSpan = toolSpan.id + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const monitor = this.handlers['llmonitor'].client + const chainEventId: string = this.handlers['llmonitor'].chainEvent[parentIds['llmonitor'].chainEvent] + + if (monitor && chainEventId) { + const runId = uuidv4() + await monitor.trackEvent('tool', 'start', { + 
runId, + parentRunId: chainEventId, + name, + userId: this.options.chatId, + input + }) + this.handlers['llmonitor'].toolEvent = { [runId]: runId } + returnIds['llmonitor'].toolEvent = runId + } + } + + return returnIds + } + + async onToolEnd(returnIds: ICommonObject, output: string | object) { + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + const toolRun: RunTree | undefined = this.handlers['langSmith'].toolRun[returnIds['langSmith'].toolRun] + if (toolRun) { + await toolRun.end({ + outputs: { + output + } + }) + await toolRun.patchRun() + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + const toolSpan: LangfuseSpanClient | undefined = this.handlers['langFuse'].toolSpan[returnIds['langFuse'].toolSpan] + if (toolSpan) { + toolSpan.end({ + output + }) + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const toolEventId: string = this.handlers['llmonitor'].toolEvent[returnIds['llmonitor'].toolEvent] + const monitor = this.handlers['llmonitor'].client + + if (monitor && toolEventId) { + await monitor.trackEvent('tool', 'end', { + runId: toolEventId, + output + }) + } + } + } + + async onToolError(returnIds: ICommonObject, error: string | object) { + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) { + const toolRun: RunTree | undefined = this.handlers['langSmith'].toolRun[returnIds['langSmith'].toolRun] + if (toolRun) { + await toolRun.end({ + error: { + error + } + }) + await toolRun.patchRun() + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) { + const toolSpan: LangfuseSpanClient | undefined = this.handlers['langFuse'].toolSpan[returnIds['langFuse'].toolSpan] + if (toolSpan) { + toolSpan.end({ + output: error + }) + } + } + + if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) { + const toolEventId: string = this.handlers['llmonitor'].toolEvent[returnIds['llmonitor'].toolEvent] + const monitor = 
this.handlers['llmonitor'].client + + if (monitor && toolEventId) { + await monitor.trackEvent('tool', 'end', { + runId: toolEventId, + output: error + }) + } + } + } +} diff --git a/packages/components/src/utils.ts b/packages/components/src/utils.ts index 69f8b268e..404f7c75d 100644 --- a/packages/components/src/utils.ts +++ b/packages/components/src/utils.ts @@ -549,6 +549,18 @@ export const convertChatHistoryToText = (chatHistory: IMessage[] = []): string = .join('\n') } +/** + * Serialize array chat history to string + * @param {IMessage[]} chatHistory + * @returns {string} + */ +export const serializeChatHistory = (chatHistory: string | Array) => { + if (Array.isArray(chatHistory)) { + return chatHistory.join('\n') + } + return chatHistory +} + /** * Convert schema to zod schema * @param {string | object} schema diff --git a/packages/server/marketplaces/chatflows/API Agent OpenAI.json b/packages/server/marketplaces/chatflows/API Agent OpenAI.json index 5498b4f36..002c08c18 100644 --- a/packages/server/marketplaces/chatflows/API Agent OpenAI.json +++ b/packages/server/marketplaces/chatflows/API Agent OpenAI.json @@ -3,7 +3,7 @@ "nodes": [ { "width": 300, - "height": 510, + "height": 491, "id": "openApiChain_1", "position": { "x": 1203.1825726424859, @@ -13,8 +13,8 @@ "data": { "id": "openApiChain_1", "label": "OpenAPI Chain", - "name": "openApiChain", "version": 1, + "name": "openApiChain", "type": "OpenAPIChain", "baseClasses": ["OpenAPIChain", "BaseChain"], "category": "Chains", @@ -78,7 +78,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_1", "position": { "x": 792.3201947594027, @@ -88,8 +88,8 @@ "data": { "id": "chatOpenAI_1", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -259,8 +259,8 @@ "data": { "id": "chainTool_0", "label": "Chain Tool", - "name": "chainTool", 
"version": 1, + "name": "chainTool", "type": "ChainTool", "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool"], "category": "Tools", @@ -334,7 +334,7 @@ "id": "openAIFunctionAgent_0", "label": "OpenAI Function Agent", "name": "openAIFunctionAgent", - "version": 1, + "version": 3, "type": "AgentExecutor", "baseClasses": ["AgentExecutor", "BaseChain"], "category": "Agents", @@ -365,9 +365,8 @@ "id": "openAIFunctionAgent_0-input-memory-BaseChatMemory" }, { - "label": "OpenAI Chat Model", + "label": "OpenAI/Azure Chat Model", "name": "model", - "description": "Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer docs for more info", "type": "BaseChatModel", "id": "openAIFunctionAgent_0-input-model-BaseChatModel" } @@ -398,7 +397,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_2", "position": { "x": 1645.450699499575, @@ -408,8 +407,8 @@ "data": { "id": "chatOpenAI_2", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -579,8 +578,8 @@ "data": { "id": "bufferMemory_0", "label": "Buffer Memory", - "name": "bufferMemory", "version": 1, + "name": "bufferMemory", "type": "BufferMemory", "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], "category": "Memory", @@ -658,17 +657,6 @@ "label": "" } }, - { - "source": "chatOpenAI_2", - "sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "openAIFunctionAgent_0", - "targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel", - "type": "buttonedge", - "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel", - "data": { - "label": "" - } - }, { "source": "bufferMemory_0", "sourceHandle": 
"bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", @@ -679,6 +667,17 @@ "data": { "label": "" } + }, + { + "source": "chatOpenAI_2", + "sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "target": "openAIFunctionAgent_0", + "targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel", + "type": "buttonedge", + "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel", + "data": { + "label": "" + } } ] } diff --git a/packages/server/marketplaces/chatflows/API Agent.json b/packages/server/marketplaces/chatflows/API Agent.json index d8fa22ad1..932708483 100644 --- a/packages/server/marketplaces/chatflows/API Agent.json +++ b/packages/server/marketplaces/chatflows/API Agent.json @@ -13,8 +13,8 @@ "data": { "id": "getApiChain_0", "label": "GET API Chain", - "name": "getApiChain", "version": 1, + "name": "getApiChain", "type": "GETApiChain", "baseClasses": ["GETApiChain", "BaseChain", "BaseLangChain"], "category": "Chains", @@ -102,8 +102,8 @@ "data": { "id": "chainTool_0", "label": "Chain Tool", - "name": "chainTool", "version": 1, + "name": "chainTool", "type": "ChainTool", "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -176,8 +176,8 @@ "data": { "id": "bufferMemory_0", "label": "Buffer Memory", - "name": "bufferMemory", "version": 1, + "name": "bufferMemory", "type": "BufferMemory", "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], "category": "Memory", @@ -233,8 +233,8 @@ "data": { "id": "chainTool_1", "label": "Chain Tool", - "name": "chainTool", "version": 1, + "name": "chainTool", "type": "ChainTool", "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -307,8 +307,8 @@ "data": { "id": "postApiChain_0", "label": "POST API Chain", - "name": 
"postApiChain", "version": 1, + "name": "postApiChain", "type": "POSTApiChain", "baseClasses": ["POSTApiChain", "BaseChain", "BaseLangChain"], "category": "Chains", @@ -386,7 +386,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_2", "position": { "x": 572.8941615312035, @@ -396,8 +396,8 @@ "data": { "id": "chatOpenAI_2", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -557,7 +557,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_1", "position": { "x": 828.7788305309582, @@ -567,8 +567,8 @@ "data": { "id": "chatOpenAI_1", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -728,7 +728,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_3", "position": { "x": 1148.338912314111, @@ -738,8 +738,8 @@ "data": { "id": "chatOpenAI_3", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -869,7 +869,7 @@ } ], "inputs": { - "modelName": "gpt-3.5-turbo", + "modelName": "gpt-3.5-turbo-16k", "temperature": 0.9, "maxTokens": "", "topP": "", @@ -902,17 +902,17 @@ "height": 383, "id": "conversationalAgent_0", "position": { - "x": 2114.071431691489, - "y": 941.7926368551367 + "x": 2090.570467632979, + "y": 969.5131357270544 }, "type": "customNode", "data": { "id": "conversationalAgent_0", "label": "Conversational Agent", + "version": 2, "name": "conversationalAgent", - "version": 1, "type": "AgentExecutor", - "baseClasses": ["AgentExecutor", "BaseChain"], + "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], "category": "Agents", 
"description": "Conversational agent for a chat model. It will utilize chat specific prompts", "inputParams": [ @@ -938,8 +938,8 @@ { "label": "Language Model", "name": "model", - "type": "BaseLanguageModel", - "id": "conversationalAgent_0-input-model-BaseLanguageModel" + "type": "BaseChatModel", + "id": "conversationalAgent_0-input-model-BaseChatModel" }, { "label": "Memory", @@ -956,21 +956,21 @@ }, "outputAnchors": [ { - "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain", + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable", "name": "conversationalAgent", "label": "AgentExecutor", - "type": "AgentExecutor | BaseChain" + "type": "AgentExecutor | BaseChain | Runnable" } ], "outputs": {}, "selected": false }, "selected": false, - "dragging": false, "positionAbsolute": { - "x": 2114.071431691489, - "y": 941.7926368551367 - } + "x": 2090.570467632979, + "y": 969.5131357270544 + }, + "dragging": false } ], "edges": [ @@ -1044,9 +1044,9 @@ "source": "chatOpenAI_3", "sourceHandle": "chatOpenAI_3-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", "target": "conversationalAgent_0", - "targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel", + "targetHandle": "conversationalAgent_0-input-model-BaseChatModel", "type": "buttonedge", - "id": "chatOpenAI_3-chatOpenAI_3-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel", + "id": "chatOpenAI_3-chatOpenAI_3-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Conversational Agent.json b/packages/server/marketplaces/chatflows/Conversational Agent.json index 2232ade06..8994594a1 100644 --- a/packages/server/marketplaces/chatflows/Conversational Agent.json +++ 
b/packages/server/marketplaces/chatflows/Conversational Agent.json @@ -13,8 +13,8 @@ "data": { "id": "calculator_1", "label": "Calculator", - "name": "calculator", "version": 1, + "name": "calculator", "type": "Calculator", "baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -52,8 +52,8 @@ "data": { "id": "bufferMemory_1", "label": "Buffer Memory", - "name": "bufferMemory", "version": 1, + "name": "bufferMemory", "type": "BufferMemory", "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], "category": "Memory", @@ -109,8 +109,8 @@ "data": { "id": "serpAPI_0", "label": "Serp API", - "name": "serpAPI", "version": 1, + "name": "serpAPI", "type": "SerpAPI", "baseClasses": ["SerpAPI", "Tool", "StructuredTool"], "category": "Tools", @@ -146,7 +146,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_0", "position": { "x": 97.01321406237057, @@ -156,8 +156,8 @@ "data": { "id": "chatOpenAI_0", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -287,7 +287,7 @@ } ], "inputs": { - "modelName": "gpt-3.5-turbo", + "modelName": "gpt-3.5-turbo-16k", "temperature": 0.9, "maxTokens": "", "topP": "", @@ -320,17 +320,17 @@ "height": 383, "id": "conversationalAgent_0", "position": { - "x": 1164.4550359451973, - "y": 283.40041124403075 + "x": 1191.1524476753796, + "y": 324.2479396683294 }, "type": "customNode", "data": { "id": "conversationalAgent_0", "label": "Conversational Agent", + "version": 2, "name": "conversationalAgent", - "version": 1, "type": "AgentExecutor", - "baseClasses": ["AgentExecutor", "BaseChain"], + "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], "category": "Agents", "description": "Conversational agent for a chat model. 
It will utilize chat specific prompts", "inputParams": [ @@ -356,8 +356,8 @@ { "label": "Language Model", "name": "model", - "type": "BaseLanguageModel", - "id": "conversationalAgent_0-input-model-BaseLanguageModel" + "type": "BaseChatModel", + "id": "conversationalAgent_0-input-model-BaseChatModel" }, { "label": "Memory", @@ -374,10 +374,10 @@ }, "outputAnchors": [ { - "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain", + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable", "name": "conversationalAgent", "label": "AgentExecutor", - "type": "AgentExecutor | BaseChain" + "type": "AgentExecutor | BaseChain | Runnable" } ], "outputs": {}, @@ -385,8 +385,8 @@ }, "selected": false, "positionAbsolute": { - "x": 1164.4550359451973, - "y": 283.40041124403075 + "x": 1191.1524476753796, + "y": 324.2479396683294 }, "dragging": false } @@ -418,9 +418,9 @@ "source": "chatOpenAI_0", "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", "target": "conversationalAgent_0", - "targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel", + "targetHandle": "conversationalAgent_0-input-model-BaseChatModel", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json b/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json index aafc8e8e2..0e9e41bdd 100644 --- a/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json +++ b/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json @@ -98,7 +98,7 @@ "data": { 
"id": "conversationalRetrievalAgent_0", "label": "Conversational Retrieval Agent", - "version": 1, + "version": 3, "name": "conversationalRetrievalAgent", "type": "AgentExecutor", "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], @@ -130,10 +130,10 @@ "id": "conversationalRetrievalAgent_0-input-memory-BaseChatMemory" }, { - "label": "OpenAI Chat Model", + "label": "OpenAI/Azure Chat Model", "name": "model", - "type": "ChatOpenAI", - "id": "conversationalRetrievalAgent_0-input-model-ChatOpenAI" + "type": "BaseChatModel", + "id": "conversationalRetrievalAgent_0-input-model-BaseChatModel" } ], "inputs": { @@ -642,9 +642,9 @@ "source": "chatOpenAI_0", "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "target": "conversationalRetrievalAgent_0", - "targetHandle": "conversationalRetrievalAgent_0-input-model-ChatOpenAI", + "targetHandle": "conversationalRetrievalAgent_0-input-model-BaseChatModel", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-model-ChatOpenAI", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-model-BaseChatModel", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Long Term Memory.json b/packages/server/marketplaces/chatflows/Long Term Memory.json index c508b4807..c39f746a2 100644 --- a/packages/server/marketplaces/chatflows/Long Term Memory.json +++ b/packages/server/marketplaces/chatflows/Long Term Memory.json @@ -205,7 +205,7 @@ "data": { "id": "ZepMemory_0", "label": "Zep Memory", - "version": 1, + "version": 2, "name": "ZepMemory", "type": "ZepMemory", "baseClasses": ["ZepMemory", "BaseChatMemory", "BaseMemory"], @@ -228,13 +228,6 @@ "default": "http://127.0.0.1:8000", "id": 
"ZepMemory_0-input-baseURL-string" }, - { - "label": "Auto Summary", - "name": "autoSummary", - "type": "boolean", - "default": true, - "id": "ZepMemory_0-input-autoSummary-boolean" - }, { "label": "Session Id", "name": "sessionId", @@ -251,17 +244,10 @@ "type": "number", "default": "10", "step": 1, + "additionalParams": true, "description": "Window of size k to surface the last k back-and-forths to use as memory.", "id": "ZepMemory_0-input-k-number" }, - { - "label": "Auto Summary Template", - "name": "autoSummaryTemplate", - "type": "string", - "default": "This is the summary of the following conversation:\n{summary}", - "additionalParams": true, - "id": "ZepMemory_0-input-autoSummaryTemplate-string" - }, { "label": "AI Prefix", "name": "aiPrefix", @@ -306,10 +292,8 @@ "inputAnchors": [], "inputs": { "baseURL": "http://127.0.0.1:8000", - "autoSummary": true, "sessionId": "", "k": "10", - "autoSummaryTemplate": "This is the summary of the following conversation:\n{summary}", "aiPrefix": "ai", "humanPrefix": "human", "memoryKey": "chat_history", diff --git a/packages/server/marketplaces/chatflows/Multiple VectorDB.json b/packages/server/marketplaces/chatflows/Multiple VectorDB.json index e77186160..789b0c08e 100644 --- a/packages/server/marketplaces/chatflows/Multiple VectorDB.json +++ b/packages/server/marketplaces/chatflows/Multiple VectorDB.json @@ -1127,81 +1127,6 @@ }, "dragging": false }, - { - "width": 300, - "height": 383, - "id": "conversationalAgent_0", - "position": { - "x": 2506.011817109287, - "y": -241.58006840004734 - }, - "type": "customNode", - "data": { - "id": "conversationalAgent_0", - "label": "Conversational Agent", - "version": 1, - "name": "conversationalAgent", - "type": "AgentExecutor", - "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], - "category": "Agents", - "description": "Conversational agent for a chat model. 
It will utilize chat specific prompts", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessage", - "type": "string", - "rows": 4, - "default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. 
Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.", - "optional": true, - "additionalParams": true, - "id": "conversationalAgent_0-input-systemMessage-string" - } - ], - "inputAnchors": [ - { - "label": "Allowed Tools", - "name": "tools", - "type": "Tool", - "list": true, - "id": "conversationalAgent_0-input-tools-Tool" - }, - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "conversationalAgent_0-input-model-BaseLanguageModel" - }, - { - "label": "Memory", - "name": "memory", - "type": "BaseChatMemory", - "id": "conversationalAgent_0-input-memory-BaseChatMemory" - } - ], - "inputs": { - "tools": ["{{chainTool_2.data.instance}}", "{{chainTool_3.data.instance}}"], - "model": "{{chatOpenAI_2.data.instance}}", - "memory": "{{bufferMemory_0.data.instance}}", - "systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. 
Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist." - }, - "outputAnchors": [ - { - "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable", - "name": "conversationalAgent", - "label": "AgentExecutor", - "type": "AgentExecutor | BaseChain | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2506.011817109287, - "y": -241.58006840004734 - }, - "dragging": false - }, { "width": 300, "height": 574, @@ -1602,6 +1527,81 @@ "y": 75.96855802341503 }, "dragging": false + }, + { + "width": 300, + "height": 383, + "id": "conversationalAgent_0", + "position": { + "x": 2432.125364763489, + "y": -105.27942167533908 + }, + "type": "customNode", + "data": { + "id": "conversationalAgent_0", + "label": "Conversational Agent", + "version": 2, + "name": "conversationalAgent", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], + "category": "Agents", + "description": "Conversational agent for a chat model. It will utilize chat specific prompts", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. 
It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.", + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-systemMessage-string" + } + ], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "conversationalAgent_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseChatModel", + "id": "conversationalAgent_0-input-model-BaseChatModel" + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseChatMemory", + "id": "conversationalAgent_0-input-memory-BaseChatMemory" + } + ], + "inputs": { + "tools": ["{{chainTool_2.data.instance}}", "{{chainTool_3.data.instance}}"], + "model": "{{chatOpenAI_2.data.instance}}", + "memory": "{{bufferMemory_0.data.instance}}", + "systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. 
It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist." + }, + "outputAnchors": [ + { + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable", + "name": "conversationalAgent", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2432.125364763489, + "y": -105.27942167533908 + }, + "dragging": false } ], "edges": [ @@ -1704,6 +1704,28 @@ "label": "" } }, + { + "source": "plainText_1", + "sourceHandle": "plainText_1-output-document-Document", + "target": "faiss_0", + "targetHandle": "faiss_0-input-document-Document", + "type": "buttonedge", + "id": "plainText_1-plainText_1-output-document-Document-faiss_0-faiss_0-input-document-Document", + "data": { + "label": "" + } + }, + { + "source": "recursiveCharacterTextSplitter_0", + "sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable", + "target": "plainText_1", + "targetHandle": "plainText_1-input-textSplitter-TextSplitter", + "type": "buttonedge", + "id": 
"recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable-plainText_1-plainText_1-input-textSplitter-TextSplitter", + "data": { + "label": "" + } + }, { "source": "chainTool_2", "sourceHandle": "chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", @@ -1730,9 +1752,9 @@ "source": "chatOpenAI_2", "sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "target": "conversationalAgent_0", - "targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel", + "targetHandle": "conversationalAgent_0-input-model-BaseChatModel", "type": "buttonedge", - "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel", + "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel", "data": { "label": "" } @@ -1747,28 +1769,6 @@ "data": { "label": "" } - }, - { - "source": "plainText_1", - "sourceHandle": "plainText_1-output-document-Document", - "target": "faiss_0", - "targetHandle": "faiss_0-input-document-Document", - "type": "buttonedge", - "id": "plainText_1-plainText_1-output-document-Document-faiss_0-faiss_0-input-document-Document", - "data": { - "label": "" - } - }, - { - "source": "recursiveCharacterTextSplitter_0", - "sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable", - "target": "plainText_1", - "targetHandle": "plainText_1-input-textSplitter-TextSplitter", - "type": "buttonedge", - "id": 
"recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable-plainText_1-plainText_1-input-textSplitter-TextSplitter", - "data": { - "label": "" - } } ] } diff --git a/packages/server/marketplaces/chatflows/OpenAI Agent.json b/packages/server/marketplaces/chatflows/OpenAI Agent.json index a4944af09..17e59236d 100644 --- a/packages/server/marketplaces/chatflows/OpenAI Agent.json +++ b/packages/server/marketplaces/chatflows/OpenAI Agent.json @@ -13,8 +13,8 @@ "data": { "id": "calculator_0", "label": "Calculator", - "name": "calculator", "version": 1, + "name": "calculator", "type": "Calculator", "baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain", "Serializable"], "category": "Tools", @@ -52,8 +52,8 @@ "data": { "id": "bufferMemory_0", "label": "Buffer Memory", - "name": "bufferMemory", "version": 1, + "name": "bufferMemory", "type": "BufferMemory", "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], "category": "Memory", @@ -109,8 +109,8 @@ "data": { "id": "customTool_0", "label": "Custom Tool", - "name": "customTool", "version": 1, + "name": "customTool", "type": "CustomTool", "baseClasses": ["CustomTool", "Tool", "StructuredTool"], "category": "Tools", @@ -158,8 +158,8 @@ "data": { "id": "serper_0", "label": "Serper", - "name": "serper", "version": 1, + "name": "serper", "type": "Serper", "baseClasses": ["Serper", "Tool", "StructuredTool"], "category": "Tools", @@ -206,7 +206,7 @@ "id": "openAIFunctionAgent_0", "label": "OpenAI Function Agent", "name": "openAIFunctionAgent", - "version": 1, + "version": 3, "type": "AgentExecutor", "baseClasses": ["AgentExecutor", "BaseChain"], "category": "Agents", @@ -237,9 +237,8 @@ "id": "openAIFunctionAgent_0-input-memory-BaseChatMemory" }, { - "label": "OpenAI Chat Model", + "label": "OpenAI/Azure Chat Model", "name": "model", - "description": "Only works with 
gpt-3.5-turbo-0613 and gpt-4-0613. Refer docs for more info", "type": "BaseChatModel", "id": "openAIFunctionAgent_0-input-model-BaseChatModel" } @@ -270,7 +269,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_0", "position": { "x": 817.8210275868742, @@ -280,8 +279,8 @@ "data": { "id": "chatOpenAI_0", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -474,17 +473,6 @@ "label": "" } }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "openAIFunctionAgent_0", - "targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel", - "data": { - "label": "" - } - }, { "source": "bufferMemory_0", "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", @@ -495,6 +483,17 @@ "data": { "label": "" } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "target": "openAIFunctionAgent_0", + "targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel", + "data": { + "label": "" + } } ] } diff --git a/packages/server/marketplaces/chatflows/OpenAI Assistant.json b/packages/server/marketplaces/chatflows/OpenAI Assistant.json index 2f9a860cc..ba4c61343 100644 --- a/packages/server/marketplaces/chatflows/OpenAI Assistant.json +++ b/packages/server/marketplaces/chatflows/OpenAI Assistant.json @@ -14,7 +14,7 @@ "data": { 
"id": "openAIAssistant_0", "label": "OpenAI Assistant", - "version": 1, + "version": 2, "name": "openAIAssistant", "type": "OpenAIAssistant", "baseClasses": ["OpenAIAssistant"], @@ -27,6 +27,15 @@ "type": "asyncOptions", "loadMethod": "listAssistants", "id": "openAIAssistant_0-input-selectedAssistant-asyncOptions" + }, + { + "label": "Disable File Download", + "name": "disableFileDownload", + "type": "boolean", + "description": "Messages can contain text, images, or files. In some cases, you may want to prevent others from downloading the files. Learn more from OpenAI File Annotation docs", + "optional": true, + "additionalParams": true, + "id": "openAIAssistant_0-input-disableFileDownload-boolean" } ], "inputAnchors": [ diff --git a/packages/server/marketplaces/chatflows/WebBrowser.json b/packages/server/marketplaces/chatflows/WebBrowser.json index 2f6fb7218..0547366a6 100644 --- a/packages/server/marketplaces/chatflows/WebBrowser.json +++ b/packages/server/marketplaces/chatflows/WebBrowser.json @@ -13,8 +13,8 @@ "data": { "id": "bufferMemory_0", "label": "Buffer Memory", - "name": "bufferMemory", "version": 1, + "name": "bufferMemory", "type": "BufferMemory", "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], "category": "Memory", @@ -70,8 +70,8 @@ "data": { "id": "webBrowser_0", "label": "Web Browser", - "name": "webBrowser", "version": 1, + "name": "webBrowser", "type": "WebBrowser", "baseClasses": ["WebBrowser", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -115,82 +115,7 @@ }, { "width": 300, - "height": 383, - "id": "conversationalAgent_0", - "position": { - "x": 1464.513303631911, - "y": 155.73036805253955 - }, - "type": "customNode", - "data": { - "id": "conversationalAgent_0", - "label": "Conversational Agent", - "name": "conversationalAgent", - "version": 1, - "type": "AgentExecutor", - "baseClasses": ["AgentExecutor", "BaseChain"], - "category": "Agents", - "description": "Conversational agent for a chat model. 
It will utilize chat specific prompts", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessage", - "type": "string", - "rows": 4, - "default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. 
Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.", - "optional": true, - "additionalParams": true, - "id": "conversationalAgent_0-input-systemMessage-string" - } - ], - "inputAnchors": [ - { - "label": "Allowed Tools", - "name": "tools", - "type": "Tool", - "list": true, - "id": "conversationalAgent_0-input-tools-Tool" - }, - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "conversationalAgent_0-input-model-BaseLanguageModel" - }, - { - "label": "Memory", - "name": "memory", - "type": "BaseChatMemory", - "id": "conversationalAgent_0-input-memory-BaseChatMemory" - } - ], - "inputs": { - "tools": ["{{webBrowser_0.data.instance}}"], - "model": "{{chatOpenAI_1.data.instance}}", - "memory": "{{bufferMemory_0.data.instance}}", - "systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. 
Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist." - }, - "outputAnchors": [ - { - "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain", - "name": "conversationalAgent", - "label": "AgentExecutor", - "type": "AgentExecutor | BaseChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1464.513303631911, - "y": 155.73036805253955 - }, - "dragging": false - }, - { - "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_0", "position": { "x": 734.7477982032904, @@ -200,8 +125,8 @@ "data": { "id": "chatOpenAI_0", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -371,8 +296,8 @@ "data": { "id": "openAIEmbeddings_0", "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", "version": 1, + "name": "openAIEmbeddings", "type": "OpenAIEmbeddings", "baseClasses": ["OpenAIEmbeddings", "Embeddings"], "category": "Embeddings", @@ -445,7 +370,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_1", "position": { "x": 68.312124033115, @@ -455,8 +380,8 @@ "data": { "id": "chatOpenAI_1", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -586,7 +511,7 @@ } ], "inputs": { - "modelName": "gpt-3.5-turbo", + "modelName": "gpt-3.5-turbo-16k", "temperature": 0.9, "maxTokens": "", "topP": "", @@ -613,6 +538,81 @@ "y": -239.65476709991256 }, "dragging": false + }, + { + "width": 300, + "height": 383, + "id": "conversationalAgent_0", + "position": { + "x": 1518.944765840293, + "y": 212.2513364217197 + }, + "type": "customNode", + "data": { + "id": "conversationalAgent_0", + "label": 
"Conversational Agent", + "version": 2, + "name": "conversationalAgent", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], + "category": "Agents", + "description": "Conversational agent for a chat model. It will utilize chat specific prompts", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. 
Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.", + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-systemMessage-string" + } + ], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "conversationalAgent_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseChatModel", + "id": "conversationalAgent_0-input-model-BaseChatModel" + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseChatMemory", + "id": "conversationalAgent_0-input-memory-BaseChatMemory" + } + ], + "inputs": { + "tools": ["{{webBrowser_0.data.instance}}"], + "model": "{{chatOpenAI_1.data.instance}}", + "memory": "{{bufferMemory_0.data.instance}}", + "systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. 
Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist." + }, + "outputAnchors": [ + { + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable", + "name": "conversationalAgent", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1518.944765840293, + "y": 212.2513364217197 + }, + "dragging": false } ], "edges": [ @@ -638,17 +638,6 @@ "label": "" } }, - { - "source": "chatOpenAI_1", - "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "conversationalAgent_0", - "targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, { "source": "webBrowser_0", "sourceHandle": "webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain", @@ -660,6 +649,17 @@ "label": "" } }, + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-model-BaseChatModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel", + "data": { + "label": "" + } + }, { "source": "bufferMemory_0", "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", diff --git a/packages/server/package.json b/packages/server/package.json index 3503e9821..ab1f61492 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 
@@ { "name": "flowise", - "version": "1.4.2", + "version": "1.4.5", "description": "Flowiseai Server", "main": "dist/index", "types": "dist/index.d.ts", @@ -47,7 +47,7 @@ "dependencies": { "@oclif/core": "^1.13.10", "async-mutex": "^0.4.0", - "axios": "^0.27.2", + "axios": "1.6.2", "cors": "^2.8.5", "crypto-js": "^4.1.1", "dotenv": "^16.0.0", @@ -61,6 +61,7 @@ "mysql": "^2.18.1", "pg": "^8.11.1", "reflect-metadata": "^0.1.13", + "sanitize-html": "^2.11.0", "socket.io": "^4.6.1", "sqlite3": "^5.1.6", "typeorm": "^0.3.6", @@ -71,6 +72,7 @@ "@types/cors": "^2.8.12", "@types/crypto-js": "^4.1.1", "@types/multer": "^1.4.7", + "@types/sanitize-html": "^2.9.5", "concurrently": "^7.1.0", "nodemon": "^2.0.15", "oclif": "^3", diff --git a/packages/server/src/ChatflowPool.ts b/packages/server/src/ChatflowPool.ts index d296dcfed..325fac560 100644 --- a/packages/server/src/ChatflowPool.ts +++ b/packages/server/src/ChatflowPool.ts @@ -16,7 +16,7 @@ export class ChatflowPool { * @param {IReactFlowNode[]} startingNodes * @param {ICommonObject} overrideConfig */ - add(chatflowid: string, endingNodeData: INodeData, startingNodes: IReactFlowNode[], overrideConfig?: ICommonObject) { + add(chatflowid: string, endingNodeData: INodeData | undefined, startingNodes: IReactFlowNode[], overrideConfig?: ICommonObject) { this.activeChatflows[chatflowid] = { startingNodes, endingNodeData, diff --git a/packages/server/src/Interface.ts b/packages/server/src/Interface.ts index c562b4eec..f82c66902 100644 --- a/packages/server/src/Interface.ts +++ b/packages/server/src/Interface.ts @@ -172,7 +172,7 @@ export interface IncomingInput { export interface IActiveChatflows { [key: string]: { startingNodes: IReactFlowNode[] - endingNodeData: INodeData + endingNodeData?: INodeData inSync: boolean overrideConfig?: ICommonObject } @@ -190,12 +190,6 @@ export interface IOverrideConfig { type: string } -export interface IDatabaseExport { - chatmessages: IChatMessage[] - chatflows: IChatFlow[] - apikeys: 
ICommonObject[] -} - export type ICredentialDataDecrypted = ICommonObject // Plain credential object sent to server diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 91de4f4cd..2d40f32ed 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -17,7 +17,6 @@ import { IReactFlowNode, IReactFlowObject, INodeData, - IDatabaseExport, ICredentialReturnResponse, chatType, IChatMessage, @@ -31,18 +30,11 @@ import { constructGraphs, resolveVariables, isStartNodeDependOnInput, - getAPIKeys, - addAPIKey, - updateAPIKey, - deleteAPIKey, - compareKeys, mapMimeTypeToInputField, findAvailableConfigs, isSameOverrideConfig, - replaceAllAPIKeys, isFlowValidForStream, databaseEntities, - getApiKey, transformToCredentialEntity, decryptCredentialData, clearAllSessionMemory, @@ -50,7 +42,8 @@ import { getEncryptionKey, checkMemorySessionId, clearSessionMemoryFromViewMessageDialog, - getUserHome + getUserHome, + replaceChatHistory } from './utils' import { cloneDeep, omit, uniqWith, isEqual } from 'lodash' import { getDataSource } from './DataSource' @@ -62,8 +55,13 @@ import { Tool } from './database/entities/Tool' import { Assistant } from './database/entities/Assistant' import { ChatflowPool } from './ChatflowPool' import { CachePool } from './CachePool' -import { ICommonObject, INodeOptionsValue } from 'flowise-components' +import { ICommonObject, IMessage, INodeOptionsValue } from 'flowise-components' import { createRateLimiter, getRateLimiter, initializeRateLimiter } from './utils/rateLimit' +import { addAPIKey, compareKeys, deleteAPIKey, getApiKey, getAPIKeys, updateAPIKey } from './utils/apiKey' +import { sanitizeMiddleware } from './utils/XSS' +import axios from 'axios' +import { Client } from 'langchainhub' +import { parsePrompt } from './utils/hub' export class App { app: express.Application @@ -121,9 +119,15 @@ export class App { // Allow access from * this.app.use(cors()) + // Switch off the default 'X-Powered-By: Express' 
header + this.app.disable('x-powered-by') + // Add the expressRequestLogger middleware to log all requests this.app.use(expressRequestLogger) + // Add the sanitizeMiddleware to guard against XSS + this.app.use(sanitizeMiddleware) + if (process.env.FLOWISE_USERNAME && process.env.FLOWISE_PASSWORD) { const username = process.env.FLOWISE_USERNAME const password = process.env.FLOWISE_PASSWORD @@ -196,7 +200,7 @@ export class App { // Get component credential via name this.app.get('/api/v1/components-credentials/:name', (req: Request, res: Response) => { - if (!req.params.name.includes('&')) { + if (!req.params.name.includes('&')) { if (Object.prototype.hasOwnProperty.call(this.nodesPool.componentCredentials, req.params.name)) { return res.json(this.nodesPool.componentCredentials[req.params.name]) } else { @@ -204,7 +208,7 @@ export class App { } } else { const returnResponse = [] - for (const name of req.params.name.split('&')) { + for (const name of req.params.name.split('&')) { if (Object.prototype.hasOwnProperty.call(this.nodesPool.componentCredentials, name)) { returnResponse.push(this.nodesPool.componentCredentials[name]) } else { @@ -970,6 +974,12 @@ export class App { // Download file from assistant this.app.post('/api/v1/openai-assistants-file', async (req: Request, res: Response) => { const filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', req.body.fileName) + //raise error if file path is not absolute + if (!path.isAbsolute(filePath)) return res.status(500).send(`Invalid file path`) + //raise error if file path contains '..' 
+ if (filePath.includes('..')) return res.status(500).send(`Invalid file path`) + //only return from the .flowise openai-assistant folder + if (!(filePath.includes('.flowise') && filePath.includes('openai-assistant'))) return res.status(500).send(`Invalid file path`) res.setHeader('Content-Disposition', 'attachment; filename=' + path.basename(filePath)) const fileStream = fs.createReadStream(filePath) fileStream.pipe(res) @@ -1025,57 +1035,6 @@ export class App { } }) - // ---------------------------------------- - // Export Load Chatflow & ChatMessage & Apikeys - // ---------------------------------------- - - this.app.get('/api/v1/database/export', async (req: Request, res: Response) => { - const chatmessages = await this.AppDataSource.getRepository(ChatMessage).find() - const chatflows = await this.AppDataSource.getRepository(ChatFlow).find() - const apikeys = await getAPIKeys() - const result: IDatabaseExport = { - chatmessages, - chatflows, - apikeys - } - return res.json(result) - }) - - this.app.post('/api/v1/database/load', async (req: Request, res: Response) => { - const databaseItems: IDatabaseExport = req.body - - await this.AppDataSource.getRepository(ChatFlow).delete({}) - await this.AppDataSource.getRepository(ChatMessage).delete({}) - - let error = '' - - // Get a new query runner instance - const queryRunner = this.AppDataSource.createQueryRunner() - - // Start a new transaction - await queryRunner.startTransaction() - - try { - const chatflows: ChatFlow[] = databaseItems.chatflows - const chatmessages: ChatMessage[] = databaseItems.chatmessages - - await queryRunner.manager.insert(ChatFlow, chatflows) - await queryRunner.manager.insert(ChatMessage, chatmessages) - - await queryRunner.commitTransaction() - } catch (err: any) { - error = err?.message ?? 
'Error loading database' - await queryRunner.rollbackTransaction() - } finally { - await queryRunner.release() - } - - await replaceAllAPIKeys(databaseItems.apikeys) - - if (error) return res.status(500).send(error) - return res.status(201).send('OK') - }) - // ---------------------------------------- // Upsert // ---------------------------------------- @@ -1093,6 +1052,35 @@ export class App { await this.buildChatflow(req, res, undefined, true, true) }) + // ---------------------------------------- + // Prompt from Hub + // ---------------------------------------- + this.app.post('/api/v1/load-prompt', async (req: Request, res: Response) => { + try { + let hub = new Client() + const prompt = await hub.pull(req.body.promptName) + const templates = parsePrompt(prompt) + return res.json({ status: 'OK', prompt: req.body.promptName, templates: templates }) + } catch (e: any) { + return res.json({ status: 'ERROR', prompt: req.body.promptName, error: e?.message }) + } + }) + + this.app.post('/api/v1/prompts-list', async (req: Request, res: Response) => { + try { + const tags = req.body.tags ? 
`tags=${req.body.tags}` : '' + // Default to 100, TODO: add pagination and use offset & limit + const url = `https://api.hub.langchain.com/repos/?limit=100&${tags}has_commits=true&sort_field=num_likes&sort_direction=desc&is_archived=false` + axios.get(url).then((response) => { + if (response.data.repos) { + return res.json({ status: 'OK', repos: response.data.repos }) + } + }) + } catch (e: any) { + return res.json({ status: 'ERROR', repos: [] }) + } + }) + // ---------------------------------------- // Prediction // ---------------------------------------- @@ -1325,14 +1313,14 @@ export class App { * @param {IReactFlowEdge[]} edges * @returns {string | undefined} */ - findMemoryLabel(nodes: IReactFlowNode[], edges: IReactFlowEdge[]): string | undefined { + findMemoryLabel(nodes: IReactFlowNode[], edges: IReactFlowEdge[]): IReactFlowNode | undefined { const memoryNodes = nodes.filter((node) => node.data.category === 'Memory') const memoryNodeIds = memoryNodes.map((mem) => mem.data.id) for (const edge of edges) { if (memoryNodeIds.includes(edge.source)) { const memoryNode = nodes.find((node) => node.data.id === edge.source) - return memoryNode ? 
memoryNode.data.label : undefined + return memoryNode } } return undefined @@ -1398,16 +1386,19 @@ export class App { const nodes = parsedFlowData.nodes const edges = parsedFlowData.edges - /* Reuse the flow without having to rebuild (to avoid duplicated upsert, recomputation) when all these conditions met: + /* Reuse the flow without having to rebuild (to avoid duplicated upsert, recomputation, reinitialization of memory) when all these conditions met: * - Node Data already exists in pool * - Still in sync (i.e the flow has not been modified since) * - Existing overrideConfig and new overrideConfig are the same * - Flow doesn't start with/contain nodes that depend on incomingInput.question + * - Its not an Upsert request + * TODO: convert overrideConfig to hash when we no longer store base64 string but filepath ***/ const isFlowReusable = () => { return ( Object.prototype.hasOwnProperty.call(this.chatflowPool.activeChatflows, chatflowid) && this.chatflowPool.activeChatflows[chatflowid].inSync && + this.chatflowPool.activeChatflows[chatflowid].endingNodeData && isSameOverrideConfig( isInternal, this.chatflowPool.activeChatflows[chatflowid].overrideConfig, @@ -1419,7 +1410,7 @@ export class App { } if (isFlowReusable()) { - nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData + nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData as INodeData isStreamValid = isFlowValidForStream(nodes, nodeToExecuteData) logger.debug( `[server]: Reuse existing chatflow ${chatflowid} with ending node ${nodeToExecuteData.label} (${nodeToExecuteData.id})` @@ -1453,10 +1444,24 @@ export class App { isStreamValid = isFlowValidForStream(nodes, endingNodeData) + let chatHistory: IMessage[] | string = incomingInput.history + if ( + endingNodeData.inputs?.memory && + !incomingInput.history && + (incomingInput.chatId || incomingInput.overrideConfig?.sessionId) + ) { + const memoryNodeId = 
endingNodeData.inputs?.memory.split('.')[0].replace('{{', '') + const memoryNode = nodes.find((node) => node.data.id === memoryNodeId) + if (memoryNode) { + chatHistory = await replaceChatHistory(memoryNode, incomingInput, this.AppDataSource, databaseEntities, logger) + } + } + /*** Get Starting Nodes with Non-Directed Graph ***/ const constructedObj = constructGraphs(nodes, edges, true) const nonDirectedGraph = constructedObj.graph const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId) + const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.id)) logger.debug(`[server]: Start building chatflow ${chatflowid}`) /*** BFS to traverse from Starting Nodes to Ending Node ***/ @@ -1467,7 +1472,7 @@ export class App { depthQueue, this.nodesPool.componentNodes, incomingInput.question, - incomingInput.history, + chatHistory, chatId, chatflowid, this.AppDataSource, @@ -1476,22 +1481,26 @@ export class App { isUpsert, incomingInput.stopNodeId ) - if (isUpsert) return res.status(201).send('Successfully Upserted') + if (isUpsert) { + this.chatflowPool.add(chatflowid, undefined, startingNodes, incomingInput?.overrideConfig) + return res.status(201).send('Successfully Upserted') + } const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId) if (!nodeToExecute) return res.status(404).send(`Node ${endingNodeId} not found`) - if (incomingInput.overrideConfig) + if (incomingInput.overrideConfig) { nodeToExecute.data = replaceInputsWithConfig(nodeToExecute.data, incomingInput.overrideConfig) + } + const reactFlowNodeData: INodeData = resolveVariables( nodeToExecute.data, reactFlowNodes, incomingInput.question, - incomingInput.history + chatHistory ) nodeToExecuteData = reactFlowNodeData - const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.id)) this.chatflowPool.add(chatflowid, nodeToExecuteData, startingNodes, incomingInput?.overrideConfig) } @@ -1504,11 +1513,18 @@ export class App 
{ let sessionId = undefined if (nodeToExecuteData.instance) sessionId = checkMemorySessionId(nodeToExecuteData.instance, chatId) - const memoryType = this.findMemoryLabel(nodes, edges) + const memoryNode = this.findMemoryLabel(nodes, edges) + const memoryType = memoryNode?.data.label + + let chatHistory: IMessage[] | string = incomingInput.history + if (memoryNode && !incomingInput.history && (incomingInput.chatId || incomingInput.overrideConfig?.sessionId)) { + chatHistory = await replaceChatHistory(memoryNode, incomingInput, this.AppDataSource, databaseEntities, logger) + } let result = isStreamValid ? await nodeInstance.run(nodeToExecuteData, incomingInput.question, { - chatHistory: incomingInput.history, + chatflowid, + chatHistory, socketIO, socketIOClientId: incomingInput.socketIOClientId, logger, @@ -1518,7 +1534,8 @@ export class App { chatId }) : await nodeInstance.run(nodeToExecuteData, incomingInput.question, { - chatHistory: incomingInput.history, + chatflowid, + chatHistory, logger, appDataSource: this.AppDataSource, databaseEntities, diff --git a/packages/server/src/utils/XSS.ts b/packages/server/src/utils/XSS.ts new file mode 100644 index 000000000..5d8b81e91 --- /dev/null +++ b/packages/server/src/utils/XSS.ts @@ -0,0 +1,20 @@ +import { Request, Response, NextFunction } from 'express' +import sanitizeHtml from 'sanitize-html' + +export function sanitizeMiddleware(req: Request, res: Response, next: NextFunction): void { + // decoding is necessary as the url is encoded by the browser + const decodedURI = decodeURI(req.url) + req.url = sanitizeHtml(decodedURI) + for (let p in req.query) { + if (Array.isArray(req.query[p])) { + const sanitizedQ = [] + for (const q of req.query[p] as string[]) { + sanitizedQ.push(sanitizeHtml(q)) + } + req.query[p] = sanitizedQ + } else { + req.query[p] = sanitizeHtml(req.query[p] as string) + } + } + next() +} diff --git a/packages/server/src/utils/apiKey.ts b/packages/server/src/utils/apiKey.ts new file mode 100644 
index 000000000..08a9ecd37 --- /dev/null +++ b/packages/server/src/utils/apiKey.ts @@ -0,0 +1,147 @@ +import { randomBytes, scryptSync, timingSafeEqual } from 'crypto' +import { ICommonObject } from 'flowise-components' +import moment from 'moment' +import fs from 'fs' +import path from 'path' +import logger from './logger' + +/** + * Returns the api key path + * @returns {string} + */ +export const getAPIKeyPath = (): string => { + return process.env.APIKEY_PATH ? path.join(process.env.APIKEY_PATH, 'api.json') : path.join(__dirname, '..', '..', 'api.json') +} + +/** + * Generate the api key + * @returns {string} + */ +export const generateAPIKey = (): string => { + const buffer = randomBytes(32) + return buffer.toString('base64') +} + +/** + * Generate the secret key + * @param {string} apiKey + * @returns {string} + */ +export const generateSecretHash = (apiKey: string): string => { + const salt = randomBytes(8).toString('hex') + const buffer = scryptSync(apiKey, salt, 64) as Buffer + return `${buffer.toString('hex')}.${salt}` +} + +/** + * Verify valid keys + * @param {string} storedKey + * @param {string} suppliedKey + * @returns {boolean} + */ +export const compareKeys = (storedKey: string, suppliedKey: string): boolean => { + const [hashedPassword, salt] = storedKey.split('.') + const buffer = scryptSync(suppliedKey, salt, 64) as Buffer + return timingSafeEqual(Buffer.from(hashedPassword, 'hex'), buffer) +} + +/** + * Get API keys + * @returns {Promise} + */ +export const getAPIKeys = async (): Promise => { + try { + const content = await fs.promises.readFile(getAPIKeyPath(), 'utf8') + return JSON.parse(content) + } catch (error) { + const keyName = 'DefaultKey' + const apiKey = generateAPIKey() + const apiSecret = generateSecretHash(apiKey) + const content = [ + { + keyName, + apiKey, + apiSecret, + createdAt: moment().format('DD-MMM-YY'), + id: randomBytes(16).toString('hex') + } + ] + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 
'utf8') + return content + } +} + +/** + * Add new API key + * @param {string} keyName + * @returns {Promise} + */ +export const addAPIKey = async (keyName: string): Promise => { + const existingAPIKeys = await getAPIKeys() + const apiKey = generateAPIKey() + const apiSecret = generateSecretHash(apiKey) + const content = [ + ...existingAPIKeys, + { + keyName, + apiKey, + apiSecret, + createdAt: moment().format('DD-MMM-YY'), + id: randomBytes(16).toString('hex') + } + ] + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') + return content +} + +/** + * Get API Key details + * @param {string} apiKey + * @returns {Promise} + */ +export const getApiKey = async (apiKey: string) => { + const existingAPIKeys = await getAPIKeys() + const keyIndex = existingAPIKeys.findIndex((key) => key.apiKey === apiKey) + if (keyIndex < 0) return undefined + return existingAPIKeys[keyIndex] +} + +/** + * Update existing API key + * @param {string} keyIdToUpdate + * @param {string} newKeyName + * @returns {Promise} + */ +export const updateAPIKey = async (keyIdToUpdate: string, newKeyName: string): Promise => { + const existingAPIKeys = await getAPIKeys() + const keyIndex = existingAPIKeys.findIndex((key) => key.id === keyIdToUpdate) + if (keyIndex < 0) return [] + existingAPIKeys[keyIndex].keyName = newKeyName + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(existingAPIKeys), 'utf8') + return existingAPIKeys +} + +/** + * Delete API key + * @param {string} keyIdToDelete + * @returns {Promise} + */ +export const deleteAPIKey = async (keyIdToDelete: string): Promise => { + const existingAPIKeys = await getAPIKeys() + const result = existingAPIKeys.filter((key) => key.id !== keyIdToDelete) + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(result), 'utf8') + return result +} + +/** + * Replace all api keys + * @param {ICommonObject[]} content + * @returns {Promise} + */ +export const replaceAllAPIKeys = async (content: ICommonObject[]): 
Promise => { + try { + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') + } catch (error) { + logger.error(error) + } +} diff --git a/packages/server/src/utils/hub.ts b/packages/server/src/utils/hub.ts new file mode 100644 index 000000000..9a3242283 --- /dev/null +++ b/packages/server/src/utils/hub.ts @@ -0,0 +1,36 @@ +export function parsePrompt(prompt: string): any[] { + const promptObj = JSON.parse(prompt) + let response = [] + if (promptObj.kwargs.messages) { + promptObj.kwargs.messages.forEach((message: any) => { + let messageType = message.id.includes('SystemMessagePromptTemplate') + ? 'systemMessagePrompt' + : message.id.includes('HumanMessagePromptTemplate') + ? 'humanMessagePrompt' + : message.id.includes('AIMessagePromptTemplate') + ? 'aiMessagePrompt' + : 'template' + let messageTypeDisplay = message.id.includes('SystemMessagePromptTemplate') + ? 'System Message' + : message.id.includes('HumanMessagePromptTemplate') + ? 'Human Message' + : message.id.includes('AIMessagePromptTemplate') + ? 
'AI Message' + : 'Message' + let template = message.kwargs.prompt.kwargs.template + response.push({ + type: messageType, + typeDisplay: messageTypeDisplay, + template: template + }) + }) + } else if (promptObj.kwargs.template) { + let template = promptObj.kwargs.template + response.push({ + type: 'template', + typeDisplay: 'Prompt', + template: template + }) + } + return response +} diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index 86d626c44..2bf1c04a4 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -1,33 +1,33 @@ import path from 'path' import fs from 'fs' -import moment from 'moment' import logger from './logger' import { + IComponentCredentials, IComponentNodes, + ICredentialDataDecrypted, + ICredentialReqBody, IDepthQueue, IExploredNode, + INodeData, INodeDependencies, INodeDirectedGraph, INodeQueue, + IOverrideConfig, IReactFlowEdge, IReactFlowNode, IVariableDict, - INodeData, - IOverrideConfig, - ICredentialDataDecrypted, - IComponentCredentials, - ICredentialReqBody + IncomingInput } from '../Interface' import { cloneDeep, get, isEqual } from 'lodash' import { - ICommonObject, + convertChatHistoryToText, getInputVariables, - IDatabaseEntity, handleEscapeCharacters, - IMessage, - convertChatHistoryToText + ICommonObject, + IDatabaseEntity, + IMessage } from 'flowise-components' -import { scryptSync, randomBytes, timingSafeEqual } from 'crypto' +import { randomBytes } from 'crypto' import { AES, enc } from 'crypto-js' import { ChatFlow } from '../database/entities/ChatFlow' @@ -217,7 +217,7 @@ export const buildLangchain = async ( depthQueue: IDepthQueue, componentNodes: IComponentNodes, question: string, - chatHistory: IMessage[], + chatHistory: IMessage[] | string, chatId: string, chatflowid: string, appDataSource: DataSource, @@ -348,8 +348,8 @@ export const clearAllSessionMemory = async ( node.data.inputs.sessionId = sessionId } - if (newNodeInstance.clearSessionMemory) { 
- await newNodeInstance?.clearSessionMemory(node.data, { chatId, appDataSource, databaseEntities, logger }) + if (newNodeInstance.memoryMethods && newNodeInstance.memoryMethods.clearSessionMemory) { + await newNodeInstance.memoryMethods.clearSessionMemory(node.data, { chatId, appDataSource, databaseEntities, logger }) } } } @@ -381,8 +381,8 @@ export const clearSessionMemoryFromViewMessageDialog = async ( if (sessionId && node.data.inputs) node.data.inputs.sessionId = sessionId - if (newNodeInstance.clearSessionMemory) { - await newNodeInstance?.clearSessionMemory(node.data, { chatId, appDataSource, databaseEntities, logger }) + if (newNodeInstance.memoryMethods && newNodeInstance.memoryMethods.clearSessionMemory) { + await newNodeInstance.memoryMethods.clearSessionMemory(node.data, { chatId, appDataSource, databaseEntities, logger }) return } } @@ -400,7 +400,7 @@ export const getVariableValue = ( paramValue: string, reactFlowNodes: IReactFlowNode[], question: string, - chatHistory: IMessage[], + chatHistory: IMessage[] | string, isAcceptVariable = false ) => { let returnVal = paramValue @@ -433,7 +433,10 @@ export const getVariableValue = ( } if (isAcceptVariable && variableFullPath === CHAT_HISTORY_VAR_PREFIX) { - variableDict[`{{${variableFullPath}}}`] = handleEscapeCharacters(convertChatHistoryToText(chatHistory), false) + variableDict[`{{${variableFullPath}}}`] = handleEscapeCharacters( + typeof chatHistory === 'string' ? chatHistory : convertChatHistoryToText(chatHistory), + false + ) } // Split by first occurrence of '.' 
to get just nodeId @@ -476,7 +479,7 @@ export const resolveVariables = ( reactFlowNodeData: INodeData, reactFlowNodes: IReactFlowNode[], question: string, - chatHistory: IMessage[] + chatHistory: IMessage[] | string ): INodeData => { let flowNodeData = cloneDeep(reactFlowNodeData) const types = 'inputs' @@ -555,9 +558,20 @@ export const isStartNodeDependOnInput = (startingNodes: IReactFlowNode[], nodes: if (inputVariables.length > 0) return true } } - const whitelistNodeNames = ['vectorStoreToDocument', 'autoGPT'] + const whitelistNodeNames = ['vectorStoreToDocument', 'autoGPT', 'chatPromptTemplate', 'promptTemplate'] //If these nodes are found, chatflow cannot be reused for (const node of nodes) { - if (whitelistNodeNames.includes(node.data.name)) return true + if (node.data.name === 'chatPromptTemplate' || node.data.name === 'promptTemplate') { + let promptValues: ICommonObject = {} + const promptValuesRaw = node.data.inputs?.promptValues + if (promptValuesRaw) { + try { + promptValues = typeof promptValuesRaw === 'object' ? promptValuesRaw : JSON.parse(promptValuesRaw) + } catch (exception) { + console.error(exception) + } + } + if (getAllValuesFromJson(promptValues).includes(`{{${QUESTION_VAR_PREFIX}}}`)) return true + } else if (whitelistNodeNames.includes(node.data.name)) return true } return false } @@ -593,147 +607,6 @@ export const isSameOverrideConfig = ( return false } -/** - * Returns the api key path - * @returns {string} - */ -export const getAPIKeyPath = (): string => { - return process.env.APIKEY_PATH ? 
path.join(process.env.APIKEY_PATH, 'api.json') : path.join(__dirname, '..', '..', 'api.json') -} - -/** - * Generate the api key - * @returns {string} - */ -export const generateAPIKey = (): string => { - const buffer = randomBytes(32) - return buffer.toString('base64') -} - -/** - * Generate the secret key - * @param {string} apiKey - * @returns {string} - */ -export const generateSecretHash = (apiKey: string): string => { - const salt = randomBytes(8).toString('hex') - const buffer = scryptSync(apiKey, salt, 64) as Buffer - return `${buffer.toString('hex')}.${salt}` -} - -/** - * Verify valid keys - * @param {string} storedKey - * @param {string} suppliedKey - * @returns {boolean} - */ -export const compareKeys = (storedKey: string, suppliedKey: string): boolean => { - const [hashedPassword, salt] = storedKey.split('.') - const buffer = scryptSync(suppliedKey, salt, 64) as Buffer - return timingSafeEqual(Buffer.from(hashedPassword, 'hex'), buffer) -} - -/** - * Get API keys - * @returns {Promise} - */ -export const getAPIKeys = async (): Promise => { - try { - const content = await fs.promises.readFile(getAPIKeyPath(), 'utf8') - return JSON.parse(content) - } catch (error) { - const keyName = 'DefaultKey' - const apiKey = generateAPIKey() - const apiSecret = generateSecretHash(apiKey) - const content = [ - { - keyName, - apiKey, - apiSecret, - createdAt: moment().format('DD-MMM-YY'), - id: randomBytes(16).toString('hex') - } - ] - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') - return content - } -} - -/** - * Add new API key - * @param {string} keyName - * @returns {Promise} - */ -export const addAPIKey = async (keyName: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const apiKey = generateAPIKey() - const apiSecret = generateSecretHash(apiKey) - const content = [ - ...existingAPIKeys, - { - keyName, - apiKey, - apiSecret, - createdAt: moment().format('DD-MMM-YY'), - id: randomBytes(16).toString('hex') - } - 
] - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') - return content -} - -/** - * Get API Key details - * @param {string} apiKey - * @returns {Promise} - */ -export const getApiKey = async (apiKey: string) => { - const existingAPIKeys = await getAPIKeys() - const keyIndex = existingAPIKeys.findIndex((key) => key.apiKey === apiKey) - if (keyIndex < 0) return undefined - return existingAPIKeys[keyIndex] -} - -/** - * Update existing API key - * @param {string} keyIdToUpdate - * @param {string} newKeyName - * @returns {Promise} - */ -export const updateAPIKey = async (keyIdToUpdate: string, newKeyName: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const keyIndex = existingAPIKeys.findIndex((key) => key.id === keyIdToUpdate) - if (keyIndex < 0) return [] - existingAPIKeys[keyIndex].keyName = newKeyName - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(existingAPIKeys), 'utf8') - return existingAPIKeys -} - -/** - * Delete API key - * @param {string} keyIdToDelete - * @returns {Promise} - */ -export const deleteAPIKey = async (keyIdToDelete: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const result = existingAPIKeys.filter((key) => key.id !== keyIdToDelete) - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(result), 'utf8') - return result -} - -/** - * Replace all api keys - * @param {ICommonObject[]} content - * @returns {Promise} - */ -export const replaceAllAPIKeys = async (content: ICommonObject[]): Promise => { - try { - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') - } catch (error) { - logger.error(error) - } -} - /** * Map MimeType to InputField * @param {string} mimeType @@ -844,7 +717,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component */ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNodeData: INodeData) => { const streamAvailableLLMs = { - 'Chat Models': 
['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama'], + 'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock'], LLMs: ['azureOpenAI', 'openAI', 'ollama'] } @@ -1015,3 +888,67 @@ export const checkMemorySessionId = (instance: any, chatId: string): string | un return instance.memory.chatHistory.sessionId return undefined } + +/** + * Replace chatHistory if incomingInput.history is empty and sessionId/chatId is provided + * @param {IReactFlowNode} memoryNode + * @param {IncomingInput} incomingInput + * @param {DataSource} appDataSource + * @param {IDatabaseEntity} databaseEntities + * @param {any} logger + * @returns {string} + */ +export const replaceChatHistory = async ( + memoryNode: IReactFlowNode, + incomingInput: IncomingInput, + appDataSource: DataSource, + databaseEntities: IDatabaseEntity, + logger: any +): Promise => { + const nodeInstanceFilePath = memoryNode.data.filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const newNodeInstance = new nodeModule.nodeClass() + + if (incomingInput.overrideConfig?.sessionId && memoryNode.data.inputs) { + memoryNode.data.inputs.sessionId = incomingInput.overrideConfig.sessionId + } + + if (newNodeInstance.memoryMethods && newNodeInstance.memoryMethods.getChatMessages) { + return await newNodeInstance.memoryMethods.getChatMessages(memoryNode.data, { + chatId: incomingInput.chatId, + appDataSource, + databaseEntities, + logger + }) + } + + return '' +} + +/** + * Get all values from a JSON object + * @param {any} obj + * @returns {any[]} + */ +export const getAllValuesFromJson = (obj: any): any[] => { + const values: any[] = [] + + function extractValues(data: any) { + if (typeof data === 'object' && data !== null) { + if (Array.isArray(data)) { + for (const item of data) { + extractValues(item) + } + } else { + for (const key in data) { + extractValues(data[key]) + } + } + } else { + values.push(data) + } + } + + extractValues(obj) + return 
values +} diff --git a/packages/ui/package.json b/packages/ui/package.json index 6914c04a5..7a739978e 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "flowise-ui", - "version": "1.4.0", + "version": "1.4.3", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://flowiseai.com", "author": { diff --git a/packages/ui/src/api/database.js b/packages/ui/src/api/database.js deleted file mode 100644 index f36fb72c7..000000000 --- a/packages/ui/src/api/database.js +++ /dev/null @@ -1,9 +0,0 @@ -import client from './client' - -const getExportDatabase = () => client.get('/database/export') -const createLoadDatabase = (body) => client.post('/database/load', body) - -export default { - getExportDatabase, - createLoadDatabase -} diff --git a/packages/ui/src/api/prompt.js b/packages/ui/src/api/prompt.js new file mode 100644 index 000000000..42b1bdbc4 --- /dev/null +++ b/packages/ui/src/api/prompt.js @@ -0,0 +1,9 @@ +import client from './client' + +const getAvailablePrompts = (body) => client.post(`/prompts-list`, body) +const getPrompt = (body) => client.post(`/load-prompt`, body) + +export default { + getAvailablePrompts, + getPrompt +} diff --git a/packages/ui/src/assets/images/prompt_empty.svg b/packages/ui/src/assets/images/prompt_empty.svg new file mode 100644 index 000000000..61df7e32e --- /dev/null +++ b/packages/ui/src/assets/images/prompt_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.js b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.js index c10b32894..ac114c6cc 100644 --- a/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.js +++ b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.js @@ -1,7 +1,6 @@ import { useState, useRef, useEffect } from 'react' import PropTypes from 'prop-types' -import { useSelector, useDispatch } from 'react-redux' -import { useNavigate } from 'react-router-dom' +import 
{ useSelector } from 'react-redux' // material-ui import { useTheme } from '@mui/material/styles' @@ -26,16 +25,10 @@ import PerfectScrollbar from 'react-perfect-scrollbar' // project imports import MainCard from 'ui-component/cards/MainCard' import Transitions from 'ui-component/extended/Transitions' -import { BackdropLoader } from 'ui-component/loading/BackdropLoader' import AboutDialog from 'ui-component/dialog/AboutDialog' // assets -import { IconLogout, IconSettings, IconFileExport, IconFileDownload, IconInfoCircle } from '@tabler/icons' - -// API -import databaseApi from 'api/database' - -import { SET_MENU } from 'store/actions' +import { IconLogout, IconSettings, IconInfoCircle } from '@tabler/icons' import './index.css' @@ -43,17 +36,13 @@ import './index.css' const ProfileSection = ({ username, handleLogout }) => { const theme = useTheme() - const dispatch = useDispatch() - const navigate = useNavigate() const customization = useSelector((state) => state.customization) const [open, setOpen] = useState(false) - const [loading, setLoading] = useState(false) const [aboutDialogOpen, setAboutDialogOpen] = useState(false) const anchorRef = useRef(null) - const uploadRef = useRef(null) const handleClose = (event) => { if (anchorRef.current && anchorRef.current.contains(event.target)) { @@ -66,56 +55,6 @@ const ProfileSection = ({ username, handleLogout }) => { setOpen((prevOpen) => !prevOpen) } - const handleExportDB = async () => { - setOpen(false) - try { - const response = await databaseApi.getExportDatabase() - const exportItems = response.data - let dataStr = JSON.stringify(exportItems, null, 2) - let dataUri = 'data:application/json;charset=utf-8,' + encodeURIComponent(dataStr) - - let exportFileDefaultName = `DB.json` - - let linkElement = document.createElement('a') - linkElement.setAttribute('href', dataUri) - linkElement.setAttribute('download', exportFileDefaultName) - linkElement.click() - } catch (e) { - console.error(e) - } - } - - const 
handleFileUpload = (e) => { - if (!e.target.files) return - - const file = e.target.files[0] - const reader = new FileReader() - reader.onload = async (evt) => { - if (!evt?.target?.result) { - return - } - const { result } = evt.target - - if (result.includes(`"chatmessages":[`) && result.includes(`"chatflows":[`) && result.includes(`"apikeys":[`)) { - dispatch({ type: SET_MENU, opened: false }) - setLoading(true) - - try { - await databaseApi.createLoadDatabase(JSON.parse(result)) - setLoading(false) - navigate('/', { replace: true }) - navigate(0) - } catch (e) { - console.error(e) - setLoading(false) - } - } else { - alert('Incorrect Flowise Database Format') - } - } - reader.readAsText(file) - } - const prevOpen = useRef(open) useEffect(() => { if (prevOpen.current === true && open === false) { @@ -196,27 +135,6 @@ const ProfileSection = ({ username, handleLogout }) => { } }} > - { - setOpen(false) - uploadRef.current.click() - }} - > - - - - Load Database} /> - - - - - - Export Database} /> - { @@ -249,8 +167,6 @@ const ProfileSection = ({ username, handleLogout }) => { )} - handleFileUpload(e)} /> - setAboutDialogOpen(false)} /> ) diff --git a/packages/ui/src/ui-component/button/FlowListMenu.js b/packages/ui/src/ui-component/button/FlowListMenu.js index 2f5bdd5df..16bc86f24 100644 --- a/packages/ui/src/ui-component/button/FlowListMenu.js +++ b/packages/ui/src/ui-component/button/FlowListMenu.js @@ -23,7 +23,6 @@ import useConfirm from 'hooks/useConfirm' import { uiBaseURL } from '../../store/constant' import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '../../store/actions' -import ConfirmDialog from '../dialog/ConfirmDialog' import SaveChatflowDialog from '../dialog/SaveChatflowDialog' import TagDialog from '../dialog/TagDialog' @@ -286,7 +285,6 @@ export default function FlowListMenu({ chatflow, updateFlowsApi }) { Delete - { + return children.split('\n').reduce(function (arr, line) { + return arr.concat(line,
) + }, []) +} + +const Accordion = styled((props) => )(({ theme }) => ({ + border: `1px solid ${theme.palette.divider}`, + '&:not(:last-child)': { + borderBottom: 0 + }, + '&:before': { + display: 'none' + } +})) + +const AccordionSummary = styled((props) => ( + } {...props} /> +))(({ theme }) => ({ + backgroundColor: theme.palette.mode === 'dark' ? 'rgba(255, 255, 255, .05)' : 'rgba(0, 0, 0, .03)', + flexDirection: 'row-reverse', + '& .MuiAccordionSummary-expandIconWrapper.Mui-expanded': { + transform: 'rotate(180deg)' + }, + '& .MuiAccordionSummary-content': { + marginLeft: theme.spacing(1) + } +})) + +const AccordionDetails = styled(MuiAccordionDetails)(({ theme }) => ({ + padding: theme.spacing(2), + borderTop: '1px solid rgba(0, 0, 0, .125)' +})) + +const PromptLangsmithHubDialog = ({ promptType, show, onCancel, onSubmit }) => { + const portalElement = document.getElementById('portal') + const dispatch = useDispatch() + const customization = useSelector((state) => state.customization) + const getAvailablePromptsApi = useApi(promptApi.getAvailablePrompts) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [show, dispatch]) + + useEffect(() => { + if (promptType) { + getAvailablePromptsApi.request({ tags: promptType === 'template' ? 
'StringPromptTemplate&' : 'ChatPromptTemplate&' }) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [promptType]) + + useEffect(() => { + if (getAvailablePromptsApi.data && getAvailablePromptsApi.data.repos) { + setAvailablePrompNameList(getAvailablePromptsApi.data.repos) + if (getAvailablePromptsApi.data.repos?.length) handleListItemClick(0, getAvailablePromptsApi.data.repos) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAvailablePromptsApi.data]) + + const ITEM_HEIGHT = 48 + const ITEM_PADDING_TOP = 8 + const MenuProps = { + PaperProps: { + style: { + maxHeight: ITEM_HEIGHT * 4.5 + ITEM_PADDING_TOP, + width: 250 + } + } + } + + const models = [ + { id: 101, name: 'anthropic:claude-instant-1' }, + { id: 102, name: 'anthropic:claude-instant-1.2' }, + { id: 103, name: 'anthropic:claude-2' }, + { id: 104, name: 'google:palm-2-chat-bison' }, + { id: 105, name: 'google:palm-2-codechat-bison' }, + { id: 106, name: 'google:palm-2-text-bison' }, + { id: 107, name: 'meta:llama-2-13b-chat' }, + { id: 108, name: 'meta:llama-2-70b-chat' }, + { id: 109, name: 'openai:gpt-3.5-turbo' }, + { id: 110, name: 'openai:gpt-4' }, + { id: 111, name: 'openai:text-davinci-003' } + ] + const [modelName, setModelName] = useState([]) + + const usecases = [ + { id: 201, name: 'Agents' }, + { id: 202, name: 'Agent Stimulation' }, + { id: 203, name: 'Autonomous agents' }, + { id: 204, name: 'Classification' }, + { id: 205, name: 'Chatbots' }, + { id: 206, name: 'Code understanding' }, + { id: 207, name: 'Code writing' }, + { id: 208, name: 'Evaluation' }, + { id: 209, name: 'Extraction' }, + { id: 210, name: 'Interacting with APIs' }, + { id: 211, name: 'Multi-modal' }, + { id: 212, name: 'QA over documents' }, + { id: 213, name: 'Self-checking' }, + { id: 214, name: 'SQL' }, + { id: 215, name: 'Summarization' }, + { id: 216, name: 'Tagging' } + ] + const [usecase, setUsecase] = useState([]) + + const languages = [ + { id: 301, name: 'Chinese' }, + { 
id: 302, name: 'English' }, + { id: 303, name: 'French' }, + { id: 304, name: 'German' }, + { id: 305, name: 'Russian' }, + { id: 306, name: 'Spanish' } + ] + const [language, setLanguage] = useState([]) + const [availablePrompNameList, setAvailablePrompNameList] = useState([]) + const [selectedPrompt, setSelectedPrompt] = useState({}) + + const [accordionExpanded, setAccordionExpanded] = useState(['prompt']) + + const handleAccordionChange = (accordionName) => (event, isExpanded) => { + const accordians = [...accordionExpanded] + if (!isExpanded) setAccordionExpanded(accordians.filter((accr) => accr !== accordionName)) + else { + accordians.push(accordionName) + setAccordionExpanded(accordians) + } + } + + const handleListItemClick = async (index, overridePromptNameList = []) => { + const prompt = overridePromptNameList.length ? overridePromptNameList[index] : availablePrompNameList[index] + + if (!prompt.detailed) { + const createResp = await promptApi.getPrompt({ + promptName: prompt.full_name + }) + if (createResp.data) { + prompt.detailed = createResp.data.templates + } + } + setSelectedPrompt(prompt) + } + + const fetchPrompts = async () => { + let tags = promptType === 'template' ? 
'StringPromptTemplate&' : 'ChatPromptTemplate&' + modelName.forEach((item) => { + tags += `tags=${item.name}&` + }) + usecase.forEach((item) => { + tags += `tags=${item.name}&` + }) + language.forEach((item) => { + tags += `tags=${item.name}&` + }) + getAvailablePromptsApi.request({ tags: tags }) + } + + const removeDuplicates = (value) => { + let duplicateRemoved = [] + + value.forEach((item) => { + if (value.filter((o) => o.id === item.id).length === 1) { + duplicateRemoved.push(item) + } + }) + return duplicateRemoved + } + + const handleModelChange = (event) => { + const { + target: { value } + } = event + + setModelName(removeDuplicates(value)) + } + + const handleUsecaseChange = (event) => { + const { + target: { value } + } = event + + setUsecase(removeDuplicates(value)) + } + const handleLanguageChange = (event) => { + const { + target: { value } + } = event + + setLanguage(removeDuplicates(value)) + } + + const component = show ? ( + + + Langchain Hub ({promptType === 'template' ? 'PromptTemplate' : 'ChatPromptTemplate'}) + + + + + + Model + + + + + + Usecase + + + + + + Language + + + + + + + + + {availablePrompNameList && availablePrompNameList.length == 0 && ( + + + promptEmptySVG + +
No Available Prompts
+
+ )} + {availablePrompNameList && availablePrompNameList.length > 0 && ( + + + + + + + + + Available Prompts + + + {availablePrompNameList.map((item, index) => ( + handleListItemClick(index)} + > +
+ + {item.full_name} + +
+ {item.tags.map((tag, index) => ( + + ))} +
+
+
+ ))} +
+
+
+
+
+ + + + + + } + id='panel2d-header' + > + Prompt + + + + {selectedPrompt?.detailed?.map((item) => ( + <> + + {item.typeDisplay.toUpperCase()} + + +

+ {item.template} +

+
+ + ))} +
+
+
+ + } + id='panel1d-header' + > + Description + + + + {selectedPrompt?.description} + + + + + } + aria-controls='panel3d-content' + id='panel3d-header' + > + Readme + + +
+ + ) : ( + + {children} + + ) + } + }} + > + {selectedPrompt?.readme} + +
+
+
+
+
+
+
+
+
+
+ )} +
+ {availablePrompNameList && availablePrompNameList.length > 0 && ( + + + onSubmit(selectedPrompt.detailed)} + variant='contained' + > + Load + + + )} +
+ ) : null + + return createPortal(component, portalElement) +} + +PromptLangsmithHubDialog.propTypes = { + promptType: PropTypes.string, + show: PropTypes.bool, + onCancel: PropTypes.func, + onSubmit: PropTypes.func +} + +export default PromptLangsmithHubDialog diff --git a/packages/ui/src/ui-component/table/FlowListTable.js b/packages/ui/src/ui-component/table/FlowListTable.js index e3baa2e28..e1ac3b6c8 100644 --- a/packages/ui/src/ui-component/table/FlowListTable.js +++ b/packages/ui/src/ui-component/table/FlowListTable.js @@ -69,7 +69,9 @@ export const FlowListTable = ({ data, images, filterFunction, updateFlowsApi }) - + @@ -145,8 +147,8 @@ export const FlowListTable = ({ data, images, filterFunction, updateFlowsApi }) } FlowListTable.propTypes = { - data: PropTypes.object, - images: PropTypes.array, + data: PropTypes.array, + images: PropTypes.object, filterFunction: PropTypes.func, updateFlowsApi: PropTypes.object } diff --git a/packages/ui/src/views/canvas/AddNodes.js b/packages/ui/src/views/canvas/AddNodes.js index 0973cdda4..7bf3e7ff0 100644 --- a/packages/ui/src/views/canvas/AddNodes.js +++ b/packages/ui/src/views/canvas/AddNodes.js @@ -68,10 +68,14 @@ const AddNodes = ({ nodesData, node }) => { else newNodes.push(vsNode) } delete obj['Vector Stores'] - obj['Vector Stores;DEPRECATING'] = deprecatingNodes - accordianCategories['Vector Stores;DEPRECATING'] = isFilter ? true : false - obj['Vector Stores;NEW'] = newNodes - accordianCategories['Vector Stores;NEW'] = isFilter ? true : false + if (deprecatingNodes.length) { + obj['Vector Stores;DEPRECATING'] = deprecatingNodes + accordianCategories['Vector Stores;DEPRECATING'] = isFilter ? true : false + } + if (newNodes.length) { + obj['Vector Stores;NEW'] = newNodes + accordianCategories['Vector Stores;NEW'] = isFilter ? 
true : false + } setNodes(obj) } diff --git a/packages/ui/src/views/canvas/NodeInputHandler.js b/packages/ui/src/views/canvas/NodeInputHandler.js index 7eb31bdb1..892a6273d 100644 --- a/packages/ui/src/views/canvas/NodeInputHandler.js +++ b/packages/ui/src/views/canvas/NodeInputHandler.js @@ -6,6 +6,7 @@ import { useSelector } from 'react-redux' // material-ui import { useTheme, styled } from '@mui/material/styles' import { Box, Typography, Tooltip, IconButton, Button } from '@mui/material' +import IconAutoFixHigh from '@mui/icons-material/AutoFixHigh' import { tooltipClasses } from '@mui/material/Tooltip' import { IconArrowsMaximize, IconEdit, IconAlertTriangle } from '@tabler/icons' @@ -31,6 +32,7 @@ import { getInputVariables } from 'utils/genericHelper' // const import { FLOWISE_CREDENTIAL_ID } from 'store/constant' +import PromptLangsmithHubDialog from '../../ui-component/dialog/PromptLangsmithHubDialog' const EDITABLE_OPTIONS = ['selectedTool', 'selectedAssistant'] @@ -56,6 +58,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA const [reloadTimestamp, setReloadTimestamp] = useState(Date.now().toString()) const [showFormatPromptValuesDialog, setShowFormatPromptValuesDialog] = useState(false) const [formatPromptValuesDialogProps, setFormatPromptValuesDialogProps] = useState({}) + const [showPromptHubDialog, setShowPromptHubDialog] = useState(false) const onExpandDialogClicked = (value, inputParam) => { const dialogProp = { @@ -69,6 +72,17 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA setShowExpandDialog(true) } + const onShowPromptHubButtonClicked = () => { + setShowPromptHubDialog(true) + } + const onShowPromptHubButtonSubmit = (templates) => { + setShowPromptHubDialog(false) + for (const t of templates) { + if (Object.prototype.hasOwnProperty.call(data.inputs, t.type)) { + data.inputs[t.type] = t.template + } + } + } const onFormatPromptValuesClicked = (value, inputParam) => { // Preset 
values if the field is format prompt values let inputValue = value @@ -209,6 +223,31 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA )} + {(data.name === 'promptTemplate' || data.name === 'chatPromptTemplate') && + (inputParam.name === 'template' || inputParam.name === 'systemMessagePrompt') && ( + <> + + setShowPromptHubDialog(false)} + onSubmit={onShowPromptHubButtonSubmit} + > + + )}
{inputParam.label} @@ -260,6 +299,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA }} /> )} + {inputParam.type === 'file' && ( { try { await chatflowsApi.deleteChatflow(chatflow.id) localStorage.removeItem(`${chatflow.id}_INTERNAL`) - navigate(-1) + navigate('/') } catch (error) { const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` enqueueSnackbar({ diff --git a/packages/ui/src/views/chatflows/index.js b/packages/ui/src/views/chatflows/index.js index 3c4b89728..c87ad306c 100644 --- a/packages/ui/src/views/chatflows/index.js +++ b/packages/ui/src/views/chatflows/index.js @@ -12,6 +12,7 @@ import ItemCard from 'ui-component/cards/ItemCard' import { gridSpacing } from 'store/constant' import WorkflowEmptySVG from 'assets/images/workflow_empty.svg' import LoginDialog from 'ui-component/dialog/LoginDialog' +import ConfirmDialog from 'ui-component/dialog/ConfirmDialog' // API import chatflowsApi from 'api/chatflows' @@ -160,7 +161,6 @@ const Chatflows = () => { variant='contained' value='card' title='Card View' - selectedColor='#00abc0' > @@ -212,6 +212,7 @@ const Chatflows = () => { )} + ) }