Merge branch 'main' into feature/conversation-starters

This commit is contained in:
Henry 2023-12-12 22:34:42 +00:00
commit ebe24da14d
66 changed files with 2258 additions and 943 deletions

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.4.2",
"version": "1.4.5",
"private": true,
"homepage": "https://flowiseai.com",
"workspaces": [

View File

@ -12,7 +12,7 @@ class LangfuseApi implements INodeCredential {
this.name = 'langfuseApi'
this.version = 1.0
this.description =
- 'Refer to <a target="_blank" href="https://langfuse.com/docs/get-started/">official guide</a> on how to get API key on Langfuse'
+ 'Refer to <a target="_blank" href="https://langfuse.com/docs/flowise">integration guide</a> on how to get API keys on Langfuse'
this.inputs = [
{
label: 'Secret Key',

View File

@ -8,7 +8,7 @@ class RedisCacheApi implements INodeCredential {
inputs: INodeParams[]
constructor() {
- this.label = 'Redis Cache API'
+ this.label = 'Redis API'
this.name = 'redisCacheApi'
this.version = 1.0
this.inputs = [

View File

@ -8,7 +8,7 @@ class RedisCacheUrlApi implements INodeCredential {
inputs: INodeParams[]
constructor() {
- this.label = 'Redis Cache URL'
+ this.label = 'Redis URL'
this.name = 'redisCacheUrlApi'
this.version = 1.0
this.inputs = [
@ -16,7 +16,7 @@ class RedisCacheUrlApi implements INodeCredential {
label: 'Redis URL',
name: 'redisUrl',
type: 'string',
- default: '127.0.0.1'
+ default: 'redis://localhost:6379'
}
]
}
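For context on the new default: `redis://localhost:6379` is the standard Redis connection-string form (`redis://[user:password@]host[:port][/db]`), whereas the old bare IP was not a usable URL for most clients. A minimal sketch, assuming the ioredis package that flowise-components already depends on:

```ts
import Redis from 'ioredis'

// ioredis accepts the full connection-string form:
// redis://[user:password@]host[:port][/db]
const client = new Redis('redis://localhost:6379')

async function ping(): Promise<void> {
    console.log(await client.ping()) // "PONG" when the server is reachable
    client.disconnect()
}
```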

View File

@ -3,7 +3,7 @@ import { initializeAgentExecutorWithOptions, AgentExecutor, InitializeAgentExecu
import { Tool } from 'langchain/tools'
import { BaseChatMemory } from 'langchain/memory'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
- import { BaseLanguageModel } from 'langchain/base_language'
+ import { BaseChatModel } from 'langchain/chat_models/base'
import { flatten } from 'lodash'
import { additionalCallbacks } from '../../../src/handler'
@ -29,7 +29,7 @@ class ConversationalAgent_Agents implements INode {
constructor() {
this.label = 'Conversational Agent'
this.name = 'conversationalAgent'
- this.version = 1.0
+ this.version = 2.0
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'agent.svg'
@ -45,7 +45,7 @@ class ConversationalAgent_Agents implements INode {
{
label: 'Language Model',
name: 'model',
- type: 'BaseLanguageModel'
+ type: 'BaseChatModel'
},
{
label: 'Memory',
@ -65,7 +65,7 @@ class ConversationalAgent_Agents implements INode {
}
async init(nodeData: INodeData): Promise<any> {
- const model = nodeData.inputs?.model as BaseLanguageModel
+ const model = nodeData.inputs?.model as BaseChatModel
let tools = nodeData.inputs?.tools as Tool[]
tools = flatten(tools)
const memory = nodeData.inputs?.memory as BaseChatMemory
@ -92,8 +92,6 @@ class ConversationalAgent_Agents implements INode {
const executor = nodeData.instance as AgentExecutor
const memory = nodeData.inputs?.memory as BaseChatMemory
- const callbacks = await additionalCallbacks(nodeData, options)
if (options && options.chatHistory) {
const chatHistoryClassName = memory.chatHistory.constructor.name
// Only replace when it's in-memory
@ -103,6 +101,10 @@ class ConversationalAgent_Agents implements INode {
}
}
+ ;(executor.memory as any).returnMessages = true // return message objects, as required by BaseChatModel
+ const callbacks = await additionalCallbacks(nodeData, options)
const result = await executor.call({ input }, [...callbacks])
return result?.output
}
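For context on the `returnMessages` flag set above: a `BaseChatModel` consumes history as message objects, not a flattened string, so memory has to be switched into message mode. A minimal sketch of the difference, assuming langchain's `BufferMemory`:

```ts
import { BufferMemory } from 'langchain/memory'

async function demo(): Promise<void> {
    const memory = new BufferMemory({ memoryKey: 'chat_history' })
    await memory.saveContext({ input: 'hi' }, { output: 'hello' })

    // Default (returnMessages = false): history loads as one formatted string.
    console.log(await memory.loadMemoryVariables({}))

    // returnMessages = true: history loads as BaseMessage[] (HumanMessage,
    // AIMessage), which is the shape a BaseChatModel prompt expects.
    memory.returnMessages = true
    console.log(await memory.loadMemoryVariables({}))
}
```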

View File

@ -21,7 +21,7 @@ class ConversationalRetrievalAgent_Agents implements INode {
constructor() {
this.label = 'Conversational Retrieval Agent'
this.name = 'conversationalRetrievalAgent'
- this.version = 1.0
+ this.version = 3.0
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'agent.svg'
@ -40,9 +40,9 @@ class ConversationalRetrievalAgent_Agents implements INode {
type: 'BaseChatMemory'
},
{
- label: 'OpenAI Chat Model',
+ label: 'OpenAI/Azure Chat Model',
name: 'model',
- type: 'ChatOpenAI'
+ type: 'BaseChatModel'
},
{
label: 'System Message',
@ -82,6 +82,8 @@ class ConversationalRetrievalAgent_Agents implements INode {
if (executor.memory) {
;(executor.memory as any).memoryKey = 'chat_history'
;(executor.memory as any).outputKey = 'output'
+ ;(executor.memory as any).returnMessages = true
const chatHistoryClassName = (executor.memory as any).chatHistory.constructor.name
// Only replace when it's in-memory
if (chatHistoryClassName && chatHistoryClassName === 'ChatMessageHistory') {

View File

@ -8,6 +8,7 @@ import * as path from 'node:path'
import fetch from 'node-fetch'
import { flatten, uniqWith, isEqual } from 'lodash'
import { zodToJsonSchema } from 'zod-to-json-schema'
+ import { AnalyticHandler } from '../../../src/handler'
class OpenAIAssistant_Agents implements INode {
label: string
@ -23,7 +24,7 @@ class OpenAIAssistant_Agents implements INode {
constructor() {
this.label = 'OpenAI Assistant'
this.name = 'openAIAssistant'
- this.version = 1.0
+ this.version = 2.0
this.type = 'OpenAIAssistant'
this.category = 'Agents'
this.icon = 'openai.png'
@ -41,6 +42,15 @@ class OpenAIAssistant_Agents implements INode {
name: 'tools',
type: 'Tool',
list: true
+ },
+ {
+ label: 'Disable File Download',
+ name: 'disableFileDownload',
+ type: 'boolean',
+ description:
+ 'Messages can contain text, images, or files. In some cases, you may want to prevent others from downloading the files. Learn more from OpenAI File Annotation <a target="_blank" href="https://platform.openai.com/docs/assistants/how-it-works/managing-threads-and-messages">docs</a>',
+ optional: true,
+ additionalParams: true
+ }
]
}
@ -76,49 +86,54 @@ class OpenAIAssistant_Agents implements INode {
return null
}
- async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
- const selectedAssistantId = nodeData.inputs?.selectedAssistant as string
- const appDataSource = options.appDataSource as DataSource
- const databaseEntities = options.databaseEntities as IDatabaseEntity
- let sessionId = nodeData.inputs?.sessionId as string
+ //@ts-ignore
+ memoryMethods = {
+ async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
+ const selectedAssistantId = nodeData.inputs?.selectedAssistant as string
+ const appDataSource = options.appDataSource as DataSource
+ const databaseEntities = options.databaseEntities as IDatabaseEntity
+ let sessionId = nodeData.inputs?.sessionId as string
- const assistant = await appDataSource.getRepository(databaseEntities['Assistant']).findOneBy({
- id: selectedAssistantId
- })
- if (!assistant) {
- options.logger.error(`Assistant ${selectedAssistantId} not found`)
- return
- }
- if (!sessionId && options.chatId) {
- const chatmsg = await appDataSource.getRepository(databaseEntities['ChatMessage']).findOneBy({
- chatId: options.chatId
+ const assistant = await appDataSource.getRepository(databaseEntities['Assistant']).findOneBy({
+ id: selectedAssistantId
})
- if (!chatmsg) {
- options.logger.error(`Chat Message with Chat Id: ${options.chatId} not found`)
+ if (!assistant) {
+ options.logger.error(`Assistant ${selectedAssistantId} not found`)
return
}
- sessionId = chatmsg.sessionId
- }
- const credentialData = await getCredentialData(assistant.credential ?? '', options)
- const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData)
- if (!openAIApiKey) {
- options.logger.error(`OpenAI ApiKey not found`)
- return
- }
+ if (!sessionId && options.chatId) {
+ const chatmsg = await appDataSource.getRepository(databaseEntities['ChatMessage']).findOneBy({
+ chatId: options.chatId
+ })
+ if (!chatmsg) {
+ options.logger.error(`Chat Message with Chat Id: ${options.chatId} not found`)
+ return
+ }
+ sessionId = chatmsg.sessionId
+ }
- const openai = new OpenAI({ apiKey: openAIApiKey })
- options.logger.info(`Clearing OpenAI Thread ${sessionId}`)
- if (sessionId) await openai.beta.threads.del(sessionId)
- options.logger.info(`Successfully cleared OpenAI Thread ${sessionId}`)
+ const credentialData = await getCredentialData(assistant.credential ?? '', options)
+ const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData)
+ if (!openAIApiKey) {
+ options.logger.error(`OpenAI ApiKey not found`)
+ return
+ }
+ const openai = new OpenAI({ apiKey: openAIApiKey })
+ options.logger.info(`Clearing OpenAI Thread ${sessionId}`)
+ if (sessionId) await openai.beta.threads.del(sessionId)
+ options.logger.info(`Successfully cleared OpenAI Thread ${sessionId}`)
+ }
+ }
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | object> {
const selectedAssistantId = nodeData.inputs?.selectedAssistant as string
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
+ const disableFileDownload = nodeData.inputs?.disableFileDownload as boolean
let tools = nodeData.inputs?.tools
tools = flatten(tools)
const formattedTools = tools?.map((tool: any) => formatToOpenAIAssistantTool(tool)) ?? []
@ -135,6 +150,11 @@ class OpenAIAssistant_Agents implements INode {
const openai = new OpenAI({ apiKey: openAIApiKey })
+ // Start analytics
+ const analyticHandlers = new AnalyticHandler(nodeData, options)
+ await analyticHandlers.init()
+ const parentIds = await analyticHandlers.onChainStart('OpenAIAssistant', input)
try {
const assistantDetails = JSON.parse(assistant.details)
const openAIAssistantId = assistantDetails.id
@ -157,7 +177,8 @@ class OpenAIAssistant_Agents implements INode {
}
const chatmessage = await appDataSource.getRepository(databaseEntities['ChatMessage']).findOneBy({
- chatId: options.chatId
+ chatId: options.chatId,
+ chatflowid: options.chatflowid
})
let threadId = ''
@ -171,7 +192,7 @@ class OpenAIAssistant_Agents implements INode {
threadId = thread.id
}
- // List all runs
+ // List all runs, in case an existing thread is still running
if (!isNewThread) {
const promise = (threadId: string) => {
return new Promise<void>((resolve) => {
@ -207,6 +228,7 @@ class OpenAIAssistant_Agents implements INode {
})
// Run assistant thread
+ const llmIds = await analyticHandlers.onLLMStart('ChatOpenAI', input, parentIds)
const runThread = await openai.beta.threads.runs.create(threadId, {
assistant_id: retrievedAssistant.id
})
@ -239,7 +261,15 @@ class OpenAIAssistant_Agents implements INode {
for (let i = 0; i < actions.length; i += 1) {
const tool = tools.find((tool: any) => tool.name === actions[i].tool)
if (!tool) continue
+ // Start tool analytics
+ const toolIds = await analyticHandlers.onToolStart(tool.name, actions[i].toolInput, parentIds)
+ const toolOutput = await tool.call(actions[i].toolInput)
+ // End tool analytics
+ await analyticHandlers.onToolEnd(toolIds, toolOutput)
submitToolOutputs.push({
tool_call_id: actions[i].toolCallId,
output: toolOutput
@ -288,7 +318,9 @@ class OpenAIAssistant_Agents implements INode {
runThreadId = newRunThread.id
state = await promise(threadId, newRunThread.id)
} else {
- throw new Error(`Error processing thread: ${state}, Thread ID: ${threadId}`)
+ const errMsg = `Error processing thread: ${state}, Thread ID: ${threadId}`
+ await analyticHandlers.onChainError(parentIds, errMsg)
+ throw new Error(errMsg)
}
}
@ -310,7 +342,7 @@ class OpenAIAssistant_Agents implements INode {
const dirPath = path.join(getUserHome(), '.flowise', 'openai-assistant')
- // Iterate over the annotations and add footnotes
+ // Iterate over the annotations
for (let index = 0; index < annotations.length; index++) {
const annotation = annotations[index]
let filePath = ''
@ -323,11 +355,13 @@ class OpenAIAssistant_Agents implements INode {
// eslint-disable-next-line no-useless-escape
const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename
filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', fileName)
- await downloadFile(cited_file, filePath, dirPath, openAIApiKey)
- fileAnnotations.push({
- filePath,
- fileName
- })
+ if (!disableFileDownload) {
+ await downloadFile(cited_file, filePath, dirPath, openAIApiKey)
+ fileAnnotations.push({
+ filePath,
+ fileName
+ })
+ }
} else {
const file_path = (annotation as OpenAI.Beta.Threads.Messages.MessageContentText.Text.FilePath).file_path
if (file_path) {
@ -335,22 +369,30 @@ class OpenAIAssistant_Agents implements INode {
// eslint-disable-next-line no-useless-escape
const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename
filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', fileName)
- await downloadFile(cited_file, filePath, dirPath, openAIApiKey)
- fileAnnotations.push({
- filePath,
- fileName
- })
+ if (!disableFileDownload) {
+ await downloadFile(cited_file, filePath, dirPath, openAIApiKey)
+ fileAnnotations.push({
+ filePath,
+ fileName
+ })
+ }
}
}
// Replace the text with a footnote
- message_content.value = message_content.value.replace(`${annotation.text}`, `${filePath}`)
+ message_content.value = message_content.value.replace(
+ `${annotation.text}`,
+ `${disableFileDownload ? '' : filePath}`
+ )
}
returnVal += message_content.value
} else {
returnVal += content.text.value
}
+ const lenticularBracketRegex = /【[^】]*】/g
+ returnVal = returnVal.replace(lenticularBracketRegex, '')
} else {
const content = assistantMessages[0].content[i] as MessageContentImageFile
const fileId = content.image_file.file_id
@ -363,11 +405,18 @@ class OpenAIAssistant_Agents implements INode {
const bitmap = fsDefault.readFileSync(filePath)
const base64String = Buffer.from(bitmap).toString('base64')
// TODO: Use a file path and retrieve image on the fly. Storing as base64 to localStorage and database will easily hit limits
const imgHTML = `<img src="data:image/png;base64,${base64String}" width="100%" height="max-content" alt="${fileObj.filename}" /><br/>`
returnVal += imgHTML
}
}
+ const imageRegex = /<img[^>]*\/>/g
+ let llmOutput = returnVal.replace(imageRegex, '')
+ llmOutput = llmOutput.replace('<br/>', '')
+ await analyticHandlers.onLLMEnd(llmIds, llmOutput)
+ await analyticHandlers.onChainEnd(parentIds, messageData, true)
return {
text: returnVal,
usedTools,
@ -375,6 +424,7 @@ class OpenAIAssistant_Agents implements INode {
assistant: { assistantId: openAIAssistantId, threadId, runId: runThreadId, messages: messageData }
}
} catch (error) {
+ await analyticHandlers.onChainError(parentIds, error, true)
throw new Error(error)
}
}
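The instrumentation threaded through the run above pairs each start call with an end or error call. A condensed sketch of that lifecycle (same AnalyticHandler API as added in this commit; the thread run and tool loop are elided, and the output value is a placeholder):

```ts
import { AnalyticHandler } from '../../../src/handler'
import { ICommonObject, INodeData } from '../../../src/Interface'

async function runWithAnalytics(nodeData: INodeData, options: ICommonObject, input: string): Promise<string> {
    const handler = new AnalyticHandler(nodeData, options)
    await handler.init()
    const parentIds = await handler.onChainStart('OpenAIAssistant', input)
    try {
        const llmIds = await handler.onLLMStart('ChatOpenAI', input, parentIds)
        const output = '...assistant reply...' // placeholder for the real thread run
        await handler.onLLMEnd(llmIds, output)
        await handler.onChainEnd(parentIds, output, true) // true = flush/shutdown providers
        return output
    } catch (error) {
        await handler.onChainError(parentIds, error as object, true)
        throw error
    }
}
```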

View File

@ -20,11 +20,11 @@ class OpenAIFunctionAgent_Agents implements INode {
constructor() {
this.label = 'OpenAI Function Agent'
this.name = 'openAIFunctionAgent'
- this.version = 1.0
+ this.version = 3.0
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'openai.png'
- this.description = `An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call`
+ this.description = `An agent that uses Function Calling to pick the tool and args to call`
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
this.inputs = [
{
@ -39,10 +39,8 @@ class OpenAIFunctionAgent_Agents implements INode {
type: 'BaseChatMemory'
},
{
- label: 'OpenAI Chat Model',
+ label: 'OpenAI/Azure Chat Model',
name: 'model',
- description:
- 'Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target="_blank" href="https://platform.openai.com/docs/guides/gpt/function-calling">docs</a> for more info',
type: 'BaseChatModel'
},
{
@ -89,6 +87,8 @@ class OpenAIFunctionAgent_Agents implements INode {
}
}
+ ;(executor.memory as any).returnMessages = true // return message objects, as required by BaseChatModel
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options)

View File

@ -89,7 +89,7 @@ class RedisCache implements INode {
redisClient.update = async (prompt: string, llmKey: string, value: Generation[]) => {
for (let i = 0; i < value.length; i += 1) {
const key = getCacheKey(prompt, llmKey, String(i))
- if (ttl !== undefined) {
+ if (ttl) {
await client.set(key, JSON.stringify(serializeGeneration(value[i])), 'EX', parseInt(ttl, 10))
} else {
await client.set(key, JSON.stringify(serializeGeneration(value[i])))

View File

@ -106,16 +106,18 @@ class ConversationChain_Chains implements INode {
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const chain = nodeData.instance as ConversationChain
const memory = nodeData.inputs?.memory as BufferMemory
+ memory.returnMessages = true // return message objects, as required by BaseChatModel
if (options && options.chatHistory) {
const chatHistoryClassName = memory.chatHistory.constructor.name
// Only replace when it's in-memory
if (chatHistoryClassName && chatHistoryClassName === 'ChatMessageHistory') {
memory.chatHistory = mapChatHistory(options)
+ chain.memory = memory
}
}
chain.memory = memory
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options)

View File

@ -1,7 +1,7 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
- import { BaseLanguageModel } from 'langchain/base_language'
+ import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { formatResponse, injectOutputParser } from '../../outputparsers/OutputParserHelpers'
@ -141,7 +141,7 @@ class LLMChain_Chains implements INode {
const runPrediction = async (
inputVariables: string[],
- chain: LLMChain<string | object>,
+ chain: LLMChain<string | object | BaseLanguageModel<any, BaseLanguageModelCallOptions>>,
input: string,
promptValuesRaw: ICommonObject | undefined,
options: ICommonObject,
@ -164,7 +164,7 @@ const runPrediction = async (
if (moderations && moderations.length > 0) {
try {
// Use the output of the moderation chain as input for the LLM chain
- input = await checkInputs(moderations, chain.llm, input)
+ input = await checkInputs(moderations, input)
} catch (e) {
await new Promise((resolve) => setTimeout(resolve, 500))
streamResponse(isStreaming, e.message, socketIO, socketIOClientId)

View File

@ -27,7 +27,7 @@ class AWSChatBedrock_ChatModels implements INode {
constructor() {
this.label = 'AWS Bedrock'
this.name = 'awsChatBedrock'
- this.version = 2.0
+ this.version = 3.0
this.type = 'AWSChatBedrock'
this.icon = 'awsBedrock.png'
this.category = 'Chat Models'
@ -97,7 +97,8 @@ class AWSChatBedrock_ChatModels implements INode {
options: [
{ label: 'anthropic.claude-instant-v1', name: 'anthropic.claude-instant-v1' },
{ label: 'anthropic.claude-v1', name: 'anthropic.claude-v1' },
- { label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' }
+ { label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' },
+ { label: 'meta.llama2-13b-chat-v1', name: 'meta.llama2-13b-chat-v1' }
],
default: 'anthropic.claude-v2'
},
@ -128,12 +129,14 @@ class AWSChatBedrock_ChatModels implements INode {
const iTemperature = nodeData.inputs?.temperature as string
const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string
const cache = nodeData.inputs?.cache as BaseCache
+ const streaming = nodeData.inputs?.streaming as boolean
const obj: BaseBedrockInput & BaseLLMParams = {
region: iRegion,
model: iModel,
maxTokens: parseInt(iMax_tokens_to_sample, 10),
- temperature: parseFloat(iTemperature)
+ temperature: parseFloat(iTemperature),
+ streaming: streaming ?? true
}
/**

View File

@ -162,8 +162,11 @@ class S3_DocumentLoaders implements INode {
accessKeyId?: string
secretAccessKey?: string
} = {
- accessKeyId,
- secretAccessKey
+ region,
+ credentials: {
+ accessKeyId,
+ secretAccessKey
+ }
}
loader.load = async () => {
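The reshaped config above matches the AWS SDK v3 convention: `region` plus a nested `credentials` object rather than v2's top-level keys. A minimal sketch with `@aws-sdk/client-s3`, which is already in the dependency list (bucket, key, and env-var names are placeholders):

```ts
import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'

// v3 clients ignore v2-style top-level accessKeyId/secretAccessKey;
// credentials must be nested as shown.
const s3 = new S3Client({
    region: 'us-east-1',
    credentials: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? '',
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? ''
    }
})

async function fetchObject(): Promise<void> {
    const res = await s3.send(new GetObjectCommand({ Bucket: 'my-bucket', Key: 'docs/file.pdf' }))
    console.log(res.ContentType)
}
```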

View File

@ -18,7 +18,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
constructor() {
this.label = 'AWS Bedrock Embeddings'
this.name = 'AWSBedrockEmbeddings'
- this.version = 1.0
+ this.version = 2.0
this.type = 'AWSBedrockEmbeddings'
this.icon = 'awsBedrock.png'
this.category = 'Embeddings'
@ -81,7 +81,9 @@ class AWSBedrockEmbedding_Embeddings implements INode {
type: 'options',
options: [
{ label: 'amazon.titan-embed-text-v1', name: 'amazon.titan-embed-text-v1' },
- { label: 'amazon.titan-embed-g1-text-02', name: 'amazon.titan-embed-g1-text-02' }
+ { label: 'amazon.titan-embed-g1-text-02', name: 'amazon.titan-embed-g1-text-02' },
+ { label: 'cohere.embed-english-v3', name: 'cohere.embed-english-v3' },
+ { label: 'cohere.embed-multilingual-v3', name: 'cohere.embed-multilingual-v3' }
],
default: 'amazon.titan-embed-text-v1'
}

View File

@ -27,7 +27,7 @@ class AWSBedrock_LLMs implements INode {
constructor() {
this.label = 'AWS Bedrock'
this.name = 'awsBedrock'
- this.version = 1.2
+ this.version = 2.0
this.type = 'AWSBedrock'
this.icon = 'awsBedrock.png'
this.category = 'LLMs'
@ -98,6 +98,7 @@ class AWSBedrock_LLMs implements INode {
{ label: 'amazon.titan-tg1-large', name: 'amazon.titan-tg1-large' },
{ label: 'amazon.titan-e1t-medium', name: 'amazon.titan-e1t-medium' },
{ label: 'cohere.command-text-v14', name: 'cohere.command-text-v14' },
+ { label: 'cohere.command-light-text-v14', name: 'cohere.command-light-text-v14' },
{ label: 'ai21.j2-grande-instruct', name: 'ai21.j2-grande-instruct' },
{ label: 'ai21.j2-jumbo-instruct', name: 'ai21.j2-jumbo-instruct' },
{ label: 'ai21.j2-mid', name: 'ai21.j2-mid' },

View File

@ -1,4 +1,13 @@
- import { ICommonObject, INode, INodeData, INodeParams, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src'
+ import {
+ ICommonObject,
+ INode,
+ INodeData,
+ INodeParams,
+ getBaseClasses,
+ getCredentialData,
+ getCredentialParam,
+ serializeChatHistory
+ } from '../../../src'
import { DynamoDBChatMessageHistory } from 'langchain/stores/message/dynamodb'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
@ -70,13 +79,23 @@ class DynamoDb_Memory implements INode {
return initalizeDynamoDB(nodeData, options)
}
- async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
- const dynamodbMemory = await initalizeDynamoDB(nodeData, options)
- const sessionId = nodeData.inputs?.sessionId as string
- const chatId = options?.chatId as string
- options.logger.info(`Clearing DynamoDb memory session ${sessionId ? sessionId : chatId}`)
- await dynamodbMemory.clear()
- options.logger.info(`Successfully cleared DynamoDb memory session ${sessionId ? sessionId : chatId}`)
+ //@ts-ignore
+ memoryMethods = {
+ async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
+ const dynamodbMemory = await initalizeDynamoDB(nodeData, options)
+ const sessionId = nodeData.inputs?.sessionId as string
+ const chatId = options?.chatId as string
+ options.logger.info(`Clearing DynamoDb memory session ${sessionId ? sessionId : chatId}`)
+ await dynamodbMemory.clear()
+ options.logger.info(`Successfully cleared DynamoDb memory session ${sessionId ? sessionId : chatId}`)
+ },
+ async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise<string> {
+ const memoryKey = nodeData.inputs?.memoryKey as string
+ const dynamodbMemory = await initalizeDynamoDB(nodeData, options)
+ const key = memoryKey ?? 'chat_history'
+ const memoryResult = await dynamodbMemory.loadMemoryVariables({})
+ return serializeChatHistory(memoryResult[key])
+ }
+ }
}
@ -109,9 +128,8 @@ const initalizeDynamoDB = async (nodeData: INodeData, options: ICommonObject): P
})
const memory = new BufferMemoryExtended({
- memoryKey,
+ memoryKey: memoryKey ?? 'chat_history',
chatHistory: dynamoDb,
- returnMessages: true,
isSessionIdUsingChatMessageId
})
return memory
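`serializeChatHistory`, used by `getChatMessages` above, lives in `src/utils` and its body is outside this diff. A plausible minimal sketch (an assumption, not the actual implementation) of what such a helper does: flatten the loaded history into a single string:

```ts
import { BaseMessage } from 'langchain/schema'

// Hypothetical sketch; the real serializeChatHistory is in src/utils
// and is not shown in this commit.
function serializeChatHistory(chatHistory: string | BaseMessage[]): string {
    if (Array.isArray(chatHistory)) {
        return chatHistory.map((m) => `${m._getType()}: ${m.content}`).join('\n')
    }
    return chatHistory
}
```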

View File

@ -1,4 +1,13 @@
- import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
+ import {
+ getBaseClasses,
+ getCredentialData,
+ getCredentialParam,
+ ICommonObject,
+ INode,
+ INodeData,
+ INodeParams,
+ serializeChatHistory
+ } from '../../../src'
import { MongoDBChatMessageHistory } from 'langchain/stores/message/mongodb'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { BaseMessage, mapStoredMessageToChatMessage } from 'langchain/schema'
@ -67,13 +76,23 @@ class MongoDB_Memory implements INode {
return initializeMongoDB(nodeData, options)
}
- async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
- const mongodbMemory = await initializeMongoDB(nodeData, options)
- const sessionId = nodeData.inputs?.sessionId as string
- const chatId = options?.chatId as string
- options.logger.info(`Clearing MongoDB memory session ${sessionId ? sessionId : chatId}`)
- await mongodbMemory.clear()
- options.logger.info(`Successfully cleared MongoDB memory session ${sessionId ? sessionId : chatId}`)
+ //@ts-ignore
+ memoryMethods = {
+ async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
+ const mongodbMemory = await initializeMongoDB(nodeData, options)
+ const sessionId = nodeData.inputs?.sessionId as string
+ const chatId = options?.chatId as string
+ options.logger.info(`Clearing MongoDB memory session ${sessionId ? sessionId : chatId}`)
+ await mongodbMemory.clear()
+ options.logger.info(`Successfully cleared MongoDB memory session ${sessionId ? sessionId : chatId}`)
+ },
+ async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise<string> {
+ const memoryKey = nodeData.inputs?.memoryKey as string
+ const mongodbMemory = await initializeMongoDB(nodeData, options)
+ const key = memoryKey ?? 'chat_history'
+ const memoryResult = await mongodbMemory.loadMemoryVariables({})
+ return serializeChatHistory(memoryResult[key])
+ }
+ }
}
@ -123,9 +142,8 @@ const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): P
}
return new BufferMemoryExtended({
- memoryKey,
+ memoryKey: memoryKey ?? 'chat_history',
chatHistory: mongoDBChatMessageHistory,
- returnMessages: true,
isSessionIdUsingChatMessageId
})
}

View File

@ -3,6 +3,7 @@ import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../
import { ICommonObject } from '../../../src'
import { MotorheadMemory, MotorheadMemoryInput } from 'langchain/memory'
import fetch from 'node-fetch'
+ import { getBufferString } from 'langchain/memory'
class MotorMemory_Memory implements INode {
label: string
@ -64,13 +65,23 @@ class MotorMemory_Memory implements INode {
return initalizeMotorhead(nodeData, options)
}
- async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
- const motorhead = await initalizeMotorhead(nodeData, options)
- const sessionId = nodeData.inputs?.sessionId as string
- const chatId = options?.chatId as string
- options.logger.info(`Clearing Motorhead memory session ${sessionId ? sessionId : chatId}`)
- await motorhead.clear()
- options.logger.info(`Successfully cleared Motorhead memory session ${sessionId ? sessionId : chatId}`)
+ //@ts-ignore
+ memoryMethods = {
+ async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
+ const motorhead = await initalizeMotorhead(nodeData, options)
+ const sessionId = nodeData.inputs?.sessionId as string
+ const chatId = options?.chatId as string
+ options.logger.info(`Clearing Motorhead memory session ${sessionId ? sessionId : chatId}`)
+ await motorhead.clear()
+ options.logger.info(`Successfully cleared Motorhead memory session ${sessionId ? sessionId : chatId}`)
+ },
+ async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise<string> {
+ const memoryKey = nodeData.inputs?.memoryKey as string
+ const motorhead = await initalizeMotorhead(nodeData, options)
+ const key = memoryKey ?? 'chat_history'
+ const memoryResult = await motorhead.loadMemoryVariables({})
+ return getBufferString(memoryResult[key])
+ }
+ }
}

View File

@ -1,5 +1,5 @@
import { INode, INodeData, INodeParams, ICommonObject } from '../../../src/Interface'
- import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+ import { getBaseClasses, getCredentialData, getCredentialParam, serializeChatHistory } from '../../../src/utils'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { RedisChatMessageHistory, RedisChatMessageHistoryInput } from 'langchain/stores/message/ioredis'
import { mapStoredMessageToChatMessage, BaseMessage } from 'langchain/schema'
@ -65,13 +65,23 @@ class RedisBackedChatMemory_Memory implements INode {
return await initalizeRedis(nodeData, options)
}
- async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
- const redis = await initalizeRedis(nodeData, options)
- const sessionId = nodeData.inputs?.sessionId as string
- const chatId = options?.chatId as string
- options.logger.info(`Clearing Redis memory session ${sessionId ? sessionId : chatId}`)
- await redis.clear()
- options.logger.info(`Successfully cleared Redis memory session ${sessionId ? sessionId : chatId}`)
+ //@ts-ignore
+ memoryMethods = {
+ async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
+ const redis = await initalizeRedis(nodeData, options)
+ const sessionId = nodeData.inputs?.sessionId as string
+ const chatId = options?.chatId as string
+ options.logger.info(`Clearing Redis memory session ${sessionId ? sessionId : chatId}`)
+ await redis.clear()
+ options.logger.info(`Successfully cleared Redis memory session ${sessionId ? sessionId : chatId}`)
+ },
+ async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise<string> {
+ const memoryKey = nodeData.inputs?.memoryKey as string
+ const redis = await initalizeRedis(nodeData, options)
+ const key = memoryKey ?? 'chat_history'
+ const memoryResult = await redis.loadMemoryVariables({})
+ return serializeChatHistory(memoryResult[key])
+ }
+ }
}
@ -137,7 +147,7 @@ const initalizeRedis = async (nodeData: INodeData, options: ICommonObject): Prom
}
const memory = new BufferMemoryExtended({
- memoryKey,
+ memoryKey: memoryKey ?? 'chat_history',
chatHistory: redisChatMessageHistory,
isSessionIdUsingChatMessageId
})

View File

@ -1,5 +1,5 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
- import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+ import { getBaseClasses, getCredentialData, getCredentialParam, serializeChatHistory } from '../../../src/utils'
import { ICommonObject } from '../../../src'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { UpstashRedisChatMessageHistory } from 'langchain/stores/message/upstash_redis'
@ -63,13 +63,22 @@ class UpstashRedisBackedChatMemory_Memory implements INode {
return initalizeUpstashRedis(nodeData, options)
}
- async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
- const redis = await initalizeUpstashRedis(nodeData, options)
- const sessionId = nodeData.inputs?.sessionId as string
- const chatId = options?.chatId as string
- options.logger.info(`Clearing Upstash Redis memory session ${sessionId ? sessionId : chatId}`)
- await redis.clear()
- options.logger.info(`Successfully cleared Upstash Redis memory session ${sessionId ? sessionId : chatId}`)
+ //@ts-ignore
+ memoryMethods = {
+ async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
+ const redis = await initalizeUpstashRedis(nodeData, options)
+ const sessionId = nodeData.inputs?.sessionId as string
+ const chatId = options?.chatId as string
+ options.logger.info(`Clearing Upstash Redis memory session ${sessionId ? sessionId : chatId}`)
+ await redis.clear()
+ options.logger.info(`Successfully cleared Upstash Redis memory session ${sessionId ? sessionId : chatId}`)
+ },
+ async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise<string> {
+ const redis = await initalizeUpstashRedis(nodeData, options)
+ const key = 'chat_history'
+ const memoryResult = await redis.loadMemoryVariables({})
+ return serializeChatHistory(memoryResult[key])
+ }
+ }
}
@ -95,6 +104,7 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject
})
const memory = new BufferMemoryExtended({
+ memoryKey: 'chat_history',
chatHistory: redisChatMessageHistory,
isSessionIdUsingChatMessageId
})

View File

@ -1,7 +1,7 @@
- import { SystemMessage } from 'langchain/schema'
- import { ZepMemory, ZepMemoryInput } from 'langchain/memory/zep'
+ import { getBufferString, InputValues, MemoryVariables, OutputValues } from 'langchain/memory'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+ import { ZepMemory, ZepMemoryInput } from 'langchain/memory/zep'
import { ICommonObject } from '../../../src'
class ZepMemory_Memory implements INode {
@ -19,7 +19,7 @@ class ZepMemory_Memory implements INode {
constructor() {
this.label = 'Zep Memory'
this.name = 'ZepMemory'
- this.version = 1.0
+ this.version = 2.0
this.type = 'ZepMemory'
this.icon = 'zep.png'
this.category = 'Memory'
@ -40,17 +40,12 @@ class ZepMemory_Memory implements INode {
type: 'string',
default: 'http://127.0.0.1:8000'
},
- {
- label: 'Auto Summary',
- name: 'autoSummary',
- type: 'boolean',
- default: true
- },
{
label: 'Session Id',
name: 'sessionId',
type: 'string',
- description: 'If not specified, the first CHAT_MESSAGE_ID will be used as sessionId',
+ description:
+ 'If not specified, a random id will be used. Learn <a target="_blank" href="https://docs.flowiseai.com/memory/long-term-memory#ui-and-embedded-chat">more</a>',
default: '',
additionalParams: true,
optional: true
@ -59,15 +54,10 @@ class ZepMemory_Memory implements INode {
label: 'Size',
name: 'k',
type: 'number',
- default: '10',
- description: 'Window of size k to surface the last k back-and-forth to use as memory.'
- },
- {
- label: 'Auto Summary Template',
- name: 'autoSummaryTemplate',
- type: 'string',
- default: 'This is the summary of the following conversation:\n{summary}',
- additionalParams: true
+ placeholder: '10',
+ description: 'Window of size k to surface the last k back-and-forth to use as memory.',
+ additionalParams: true,
+ optional: true
},
{
label: 'AI Prefix',
@ -108,45 +98,28 @@ class ZepMemory_Memory implements INode {
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
- const autoSummaryTemplate = nodeData.inputs?.autoSummaryTemplate as string
- const autoSummary = nodeData.inputs?.autoSummary as boolean
- const k = nodeData.inputs?.k as string
- let zep = await initalizeZep(nodeData, options)
- // hack to support summary
- let tmpFunc = zep.loadMemoryVariables
- zep.loadMemoryVariables = async (values) => {
- let data = await tmpFunc.bind(zep, values)()
- if (autoSummary && zep.returnMessages && data[zep.memoryKey] && data[zep.memoryKey].length) {
- const zepClient = await zep.zepClientPromise
- const memory = await zepClient.memory.getMemory(zep.sessionId, parseInt(k, 10) ?? 10)
- if (memory?.summary) {
- let summary = autoSummaryTemplate.replace(/{summary}/g, memory.summary.content)
- // eslint-disable-next-line no-console
- console.log('[ZepMemory] auto summary:', summary)
- data[zep.memoryKey].unshift(new SystemMessage(summary))
- }
- }
- // for langchain zep memory compatibility, or we will get "Missing value for input variable chat_history"
- if (data instanceof Array) {
- data = {
- [zep.memoryKey]: data
- }
- }
- return data
- }
- return zep
+ return await initalizeZep(nodeData, options)
}
- async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
- const zep = await initalizeZep(nodeData, options)
- const sessionId = nodeData.inputs?.sessionId as string
- const chatId = options?.chatId as string
- options.logger.info(`Clearing Zep memory session ${sessionId ? sessionId : chatId}`)
- await zep.clear()
- options.logger.info(`Successfully cleared Zep memory session ${sessionId ? sessionId : chatId}`)
+ //@ts-ignore
+ memoryMethods = {
+ async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
+ const zep = await initalizeZep(nodeData, options)
+ const sessionId = nodeData.inputs?.sessionId as string
+ const chatId = options?.chatId as string
+ options.logger.info(`Clearing Zep memory session ${sessionId ? sessionId : chatId}`)
+ await zep.clear()
+ options.logger.info(`Successfully cleared Zep memory session ${sessionId ? sessionId : chatId}`)
+ },
+ async getChatMessages(nodeData: INodeData, options: ICommonObject): Promise<string> {
+ const memoryKey = nodeData.inputs?.memoryKey as string
+ const aiPrefix = nodeData.inputs?.aiPrefix as string
+ const humanPrefix = nodeData.inputs?.humanPrefix as string
+ const zep = await initalizeZep(nodeData, options)
+ const key = memoryKey ?? 'chat_history'
+ const memoryResult = await zep.loadMemoryVariables({})
+ return getBufferString(memoryResult[key], humanPrefix, aiPrefix)
+ }
+ }
}
@ -156,40 +129,72 @@ const initalizeZep = async (nodeData: INodeData, options: ICommonObject): Promis
const humanPrefix = nodeData.inputs?.humanPrefix as string
const memoryKey = nodeData.inputs?.memoryKey as string
const inputKey = nodeData.inputs?.inputKey as string
- const sessionId = nodeData.inputs?.sessionId as string
+ const k = nodeData.inputs?.k as string
const chatId = options?.chatId as string
let isSessionIdUsingChatMessageId = false
- if (!sessionId && chatId) isSessionIdUsingChatMessageId = true
+ let sessionId = ''
+ if (!nodeData.inputs?.sessionId && chatId) {
+ isSessionIdUsingChatMessageId = true
+ sessionId = chatId
+ } else {
+ sessionId = nodeData.inputs?.sessionId
+ }
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
- const obj: ZepMemoryInput & Partial<ZepMemoryExtendedInput> = {
+ const obj: ZepMemoryInput & ZepMemoryExtendedInput = {
baseURL,
sessionId: sessionId ? sessionId : chatId,
aiPrefix,
humanPrefix,
returnMessages: true,
memoryKey,
- inputKey
+ inputKey,
+ isSessionIdUsingChatMessageId,
+ k: k ? parseInt(k, 10) : undefined
}
if (apiKey) obj.apiKey = apiKey
- if (isSessionIdUsingChatMessageId) obj.isSessionIdUsingChatMessageId = true
return new ZepMemoryExtended(obj)
}
interface ZepMemoryExtendedInput {
isSessionIdUsingChatMessageId: boolean
+ k?: number
}
class ZepMemoryExtended extends ZepMemory {
isSessionIdUsingChatMessageId? = false
+ lastN?: number
- constructor(fields: ZepMemoryInput & Partial<ZepMemoryExtendedInput>) {
+ constructor(fields: ZepMemoryInput & ZepMemoryExtendedInput) {
super(fields)
this.isSessionIdUsingChatMessageId = fields.isSessionIdUsingChatMessageId
+ this.lastN = fields.k
}
+ async loadMemoryVariables(values: InputValues, overrideSessionId = ''): Promise<MemoryVariables> {
+ if (overrideSessionId) {
+ super.sessionId = overrideSessionId
+ }
+ return super.loadMemoryVariables({ ...values, lastN: this.lastN })
+ }
+ async saveContext(inputValues: InputValues, outputValues: OutputValues, overrideSessionId = ''): Promise<void> {
+ if (overrideSessionId) {
+ super.sessionId = overrideSessionId
+ }
+ return super.saveContext(inputValues, outputValues)
+ }
+ async clear(overrideSessionId = ''): Promise<void> {
+ if (overrideSessionId) {
+ super.sessionId = overrideSessionId
+ }
+ return super.clear()
+ }
}
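The `overrideSessionId` parameter added above lets a single memory instance serve many chat sessions. A short usage sketch of the class just defined (baseURL and session ids are placeholders):

```ts
const zep = new ZepMemoryExtended({
    baseURL: 'http://127.0.0.1:8000',
    sessionId: 'default-session',
    memoryKey: 'chat_history',
    isSessionIdUsingChatMessageId: false,
    k: 10
})

async function demo(): Promise<void> {
    // Same instance, two sessions: the override swaps sessionId per call,
    // and lastN (from k) caps how much history Zep returns.
    await zep.saveContext({ input: 'hi' }, { output: 'hello' }, 'session-a')
    console.log(await zep.loadMemoryVariables({}, 'session-b'))
}
```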

View File

@ -1,13 +1,12 @@
- import { BaseLanguageModel } from 'langchain/base_language'
import { Server } from 'socket.io'
export abstract class Moderation {
- abstract checkForViolations(llm: BaseLanguageModel, input: string): Promise<string>
+ abstract checkForViolations(input: string): Promise<string>
}
- export const checkInputs = async (inputModerations: Moderation[], llm: BaseLanguageModel, input: string): Promise<string> => {
+ export const checkInputs = async (inputModerations: Moderation[], input: string): Promise<string> => {
for (const moderation of inputModerations) {
- input = await moderation.checkForViolations(llm, input)
+ input = await moderation.checkForViolations(input)
}
return input
}
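With the `llm` parameter dropped from the signature, a runner needs nothing but the input string. A minimal custom runner against the revised abstract class above (the deny word is a placeholder):

```ts
class BlockWordModeration extends Moderation {
    async checkForViolations(input: string): Promise<string> {
        if (input.toLowerCase().includes('forbidden')) {
            throw new Error('Input violates the deny list.')
        }
        return input
    }
}

async function demo(): Promise<void> {
    // checkInputs throws on a violation; otherwise it passes input through.
    console.log(await checkInputs([new BlockWordModeration()], 'hello world'))
}
```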

View File

@ -1,5 +1,5 @@
- import { INode, INodeData, INodeParams } from '../../../src/Interface'
- import { getBaseClasses } from '../../../src'
+ import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+ import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src'
import { Moderation } from '../Moderation'
import { OpenAIModerationRunner } from './OpenAIModerationRunner'
@ -12,6 +12,7 @@ class OpenAIModeration implements INode {
icon: string
category: string
baseClasses: string[]
+ credential: INodeParams
inputs: INodeParams[]
constructor() {
@ -19,10 +20,16 @@ class OpenAIModeration implements INode {
this.name = 'inputModerationOpenAI'
this.version = 1.0
this.type = 'Moderation'
- this.icon = 'openai-moderation.png'
+ this.icon = 'openai.png'
this.category = 'Moderation'
this.description = 'Check whether content complies with OpenAI usage policies.'
this.baseClasses = [this.type, ...getBaseClasses(Moderation)]
+ this.credential = {
+ label: 'Connect Credential',
+ name: 'credential',
+ type: 'credential',
+ credentialNames: ['openAIApi']
+ }
this.inputs = [
{
label: 'Error Message',
@ -35,8 +42,11 @@ class OpenAIModeration implements INode {
]
}
- async init(nodeData: INodeData): Promise<any> {
- const runner = new OpenAIModerationRunner()
+ async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+ const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+ const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData)
+ const runner = new OpenAIModerationRunner(openAIApiKey)
const moderationErrorMessage = nodeData.inputs?.moderationErrorMessage as string
if (moderationErrorMessage) runner.setErrorMessage(moderationErrorMessage)
return runner

View File

@ -1,18 +1,21 @@
import { Moderation } from '../Moderation'
- import { BaseLanguageModel } from 'langchain/base_language'
import { OpenAIModerationChain } from 'langchain/chains'
export class OpenAIModerationRunner implements Moderation {
+ private openAIApiKey = ''
private moderationErrorMessage: string = "Text was found that violates OpenAI's content policy."
- async checkForViolations(llm: BaseLanguageModel, input: string): Promise<string> {
- const openAIApiKey = (llm as any).openAIApiKey
- if (!openAIApiKey) {
+ constructor(openAIApiKey: string) {
+ this.openAIApiKey = openAIApiKey
+ }
+ async checkForViolations(input: string): Promise<string> {
+ if (!this.openAIApiKey) {
throw Error('OpenAI API key not found')
}
// Create a new instance of the OpenAIModerationChain
const moderation = new OpenAIModerationChain({
- openAIApiKey: openAIApiKey,
+ openAIApiKey: this.openAIApiKey,
throwError: false // If set to true, the call will throw an error when the moderation chain detects violating content. If set to false, violating content will return "Text was found that violates OpenAI's content policy.".
})
// Send the user's input to the moderation chain and wait for the result

Binary file not shown (before: 47 KiB)

Binary file not shown (after: 3.9 KiB)

View File

@ -1,5 +1,4 @@
import { Moderation } from '../Moderation'
- import { BaseLanguageModel } from 'langchain/base_language'
export class SimplePromptModerationRunner implements Moderation {
private readonly denyList: string = ''
@ -13,9 +12,9 @@ export class SimplePromptModerationRunner implements Moderation {
this.moderationErrorMessage = moderationErrorMessage
}
- async checkForViolations(_: BaseLanguageModel, input: string): Promise<string> {
+ async checkForViolations(input: string): Promise<string> {
this.denyList.split('\n').forEach((denyListItem) => {
- if (denyListItem && denyListItem !== '' && input.includes(denyListItem)) {
+ if (denyListItem && denyListItem !== '' && input.toLowerCase().includes(denyListItem.toLowerCase())) {
throw Error(this.moderationErrorMessage)
}
})

View File

@ -1,6 +1,6 @@
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { LLMChain } from 'langchain/chains'
- import { BaseLanguageModel } from 'langchain/base_language'
+ import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language'
import { ICommonObject } from '../../src'
import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts'
@ -15,7 +15,7 @@ export const formatResponse = (response: string | object): string | object => {
export const injectOutputParser = (
outputParser: BaseOutputParser<unknown>,
- chain: LLMChain<string, BaseLanguageModel>,
+ chain: LLMChain<string | object | BaseLanguageModel<any, BaseLanguageModelCallOptions>>,
promptValues: ICommonObject | undefined = undefined
) => {
if (outputParser && chain.prompt) {

View File

@ -18,7 +18,7 @@ class HydeRetriever_Retrievers implements INode {
constructor() {
this.label = 'Hyde Retriever'
this.name = 'HydeRetriever'
- this.version = 1.0
+ this.version = 2.0
this.type = 'HydeRetriever'
this.icon = 'hyderetriever.svg'
this.category = 'Retrievers'
@ -36,41 +36,66 @@ class HydeRetriever_Retrievers implements INode {
type: 'VectorStore'
},
{
- label: 'Prompt Key',
+ label: 'Select Defined Prompt',
name: 'promptKey',
+ description: 'Select a pre-defined prompt',
type: 'options',
options: [
{
label: 'websearch',
- name: 'websearch'
+ name: 'websearch',
+ description: `Please write a passage to answer the question
+ Question: {question}
+ Passage:`
},
{
label: 'scifact',
- name: 'scifact'
+ name: 'scifact',
+ description: `Please write a scientific paper passage to support/refute the claim
+ Claim: {question}
+ Passage:`
},
{
label: 'arguana',
- name: 'arguana'
+ name: 'arguana',
+ description: `Please write a counter argument for the passage
+ Passage: {question}
+ Counter Argument:`
},
{
label: 'trec-covid',
- name: 'trec-covid'
+ name: 'trec-covid',
+ description: `Please write a scientific paper passage to answer the question
+ Question: {question}
+ Passage:`
},
{
label: 'fiqa',
- name: 'fiqa'
+ name: 'fiqa',
+ description: `Please write a financial article passage to answer the question
+ Question: {question}
+ Passage:`
},
{
label: 'dbpedia-entity',
- name: 'dbpedia-entity'
+ name: 'dbpedia-entity',
+ description: `Please write a passage to answer the question.
+ Question: {question}
+ Passage:`
},
{
label: 'trec-news',
- name: 'trec-news'
+ name: 'trec-news',
+ description: `Please write a news passage about the topic.
+ Topic: {question}
+ Passage:`
},
{
label: 'mr-tydi',
- name: 'mr-tydi'
+ name: 'mr-tydi',
+ description: `Please write a passage in Swahili/Korean/Japanese/Bengali to answer the question in detail.
+ Question: {question}
+ Passage:`
}
],
default: 'websearch'
@ -78,7 +103,7 @@ class HydeRetriever_Retrievers implements INode {
{
label: 'Custom Prompt',
name: 'customPrompt',
- description: 'If custom prompt is used, this will override Prompt Key',
+ description: 'If custom prompt is used, this will override Defined Prompt',
placeholder: 'Please write a passage to answer the question\nQuestion: {question}\nPassage:',
type: 'string',
rows: 4,
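The descriptions added above are the HyDE prompt templates each key expands to. A hedged usage sketch of langchain's HydeRetriever, which this node wraps (model and vector-store choices here are placeholders):

```ts
import { HydeRetriever } from 'langchain/retrievers/hyde'
import { OpenAI } from 'langchain/llms/openai'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { MemoryVectorStore } from 'langchain/vectorstores/memory'
import { PromptTemplate } from 'langchain/prompts'

async function demo(): Promise<void> {
    const vectorStore = new MemoryVectorStore(new OpenAIEmbeddings())
    const retriever = new HydeRetriever({
        llm: new OpenAI(),
        vectorStore,
        k: 4,
        // A custom template overrides the defined prompt, per the node description.
        promptTemplate: PromptTemplate.fromTemplate(
            'Please write a passage to answer the question\nQuestion: {question}\nPassage:'
        )
    })
    console.log(await retriever.getRelevantDocuments('What is HyDE?'))
}
```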

View File

@ -1,5 +1,6 @@
import { DynamicTool, DynamicToolInput } from 'langchain/tools'
import { BaseChain } from 'langchain/chains'
+ import { handleEscapeCharacters } from '../../../src/utils'
export interface ChainToolInput extends Omit<DynamicToolInput, 'func'> {
chain: BaseChain
@ -14,7 +15,8 @@ export class ChainTool extends DynamicTool {
func: async (input, runManager) => {
// To enable LLM Chain which has promptValues
if ((chain as any).prompt && (chain as any).prompt.promptValues) {
- const values = await chain.call((chain as any).prompt.promptValues, runManager?.getChild())
+ const promptValues = handleEscapeCharacters((chain as any).prompt.promptValues, true)
+ const values = await chain.call(promptValues, runManager?.getChild())
return values?.text
}
return chain.run(input, runManager?.getChild())

View File

@ -1,52 +0,0 @@
import { ZapierNLAWrapper, ZapierNLAWrapperParams } from 'langchain/tools'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ZapierToolKit } from 'langchain/agents'
import { getCredentialData, getCredentialParam } from '../../../src'
class ZapierNLA_Tools implements INode {
label: string
name: string
version: number
description: string
type: string
icon: string
category: string
badge: string
baseClasses: string[]
inputs: INodeParams[]
credential: INodeParams
constructor() {
this.label = 'Zapier NLA'
this.name = 'zapierNLA'
this.version = 1.0
this.type = 'ZapierNLA'
this.icon = 'zapier.svg'
this.category = 'Tools'
this.description = "Access to apps and actions on Zapier's platform through a natural language API interface"
this.badge = 'DEPRECATING'
this.inputs = []
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['zapierNLAApi']
}
this.baseClasses = [this.type, 'Tool']
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const zapierNLAApiKey = getCredentialParam('zapierNLAApiKey', credentialData, nodeData)
const obj: Partial<ZapierNLAWrapperParams> = {
apiKey: zapierNLAApiKey
}
const zapier = new ZapierNLAWrapper(obj)
const toolkit = await ZapierToolKit.fromZapierNLAWrapper(zapier)
return toolkit.tools
}
}
module.exports = { nodeClass: ZapierNLA_Tools }

View File

@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="800px" height="800px" viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g>
<path d="M128.080089,-0.000183105 C135.311053,0.0131003068 142.422517,0.624138494 149.335663,1.77979593 L149.335663,1.77979593 L149.335663,76.2997796 L202.166953,23.6044907 C208.002065,27.7488446 213.460883,32.3582023 218.507811,37.3926715 C223.557281,42.4271407 228.192318,47.8867213 232.346817,53.7047992 L232.346817,53.7047992 L179.512985,106.400063 L254.227854,106.400063 C255.387249,113.29414 256,120.36111 256,127.587243 L256,127.587243 L256,127.759881 C256,134.986013 255.387249,142.066204 254.227854,148.960282 L254.227854,148.960282 L179.500273,148.960282 L232.346817,201.642324 C228.192318,207.460402 223.557281,212.919983 218.523066,217.954452 L218.523066,217.954452 L218.507811,217.954452 C213.460883,222.988921 208.002065,227.6115 202.182208,231.742607 L202.182208,231.742607 L149.335663,179.04709 L149.335663,253.5672 C142.435229,254.723036 135.323765,255.333244 128.092802,255.348499 L128.092802,255.348499 L127.907197,255.348499 C120.673691,255.333244 113.590195,254.723036 106.677048,253.5672 L106.677048,253.5672 L106.677048,179.04709 L53.8457596,231.742607 C42.1780766,223.466917 31.977435,213.278734 23.6658953,201.642324 L23.6658953,201.642324 L76.4997269,148.960282 L1.78485803,148.960282 C0.612750404,142.052729 0,134.946095 0,127.719963 L0,127.719963 L0,127.349037 C0.0121454869,125.473817 0.134939797,123.182933 0.311311815,120.812834 L0.36577283,120.099764 C0.887996182,113.428547 1.78485803,106.400063 1.78485803,106.400063 L1.78485803,106.400063 L76.4997269,106.400063 L23.6658953,53.7047992 C27.8076812,47.8867213 32.4300059,42.4403618 37.4769335,37.4193681 L37.4769335,37.4193681 L37.5023588,37.3926715 C42.5391163,32.3582023 48.0106469,27.7488446 53.8457596,23.6044907 L53.8457596,23.6044907 L106.677048,76.2997796 L106.677048,1.77979593 C113.590195,0.624138494 120.688946,0.0131003068 127.932622,-0.000183105 L127.932622,-0.000183105 L128.080089,-0.000183105 Z M128.067377,95.7600714 L127.945335,95.7600714 C118.436262,95.7600714 109.32891,97.5001809 100.910584,100.661566 C97.7553011,109.043534 96.0085811,118.129275 95.9958684,127.613685 L95.9958684,127.733184 C96.0085811,137.217594 97.7553011,146.303589 100.923296,154.685303 C109.32891,157.846943 118.436262,159.587052 127.945335,159.587052 L128.067377,159.587052 C137.576449,159.587052 146.683802,157.846943 155.089415,154.685303 C158.257411,146.290368 160.004131,137.217594 160.004131,127.733184 L160.004131,127.613685 C160.004131,118.129275 158.257411,109.043534 155.089415,100.661566 C146.683802,97.5001809 137.576449,95.7600714 128.067377,95.7600714 Z" fill="#FF4A00" fill-rule="nonzero">
</path>
</g>
</svg>

Before: 2.7 KiB

View File

@ -31,7 +31,8 @@ class InMemoryVectorStore_VectorStores implements INode {
label: 'Document',
name: 'document',
type: 'Document',
- list: true
+ list: true,
+ optional: true
},
{
label: 'Embeddings',

View File

@ -1,6 +1,6 @@
{
"name": "flowise-components",
"version": "1.4.2",
"version": "1.4.6",
"description": "Flowiseai Components",
"main": "dist/src/index",
"types": "dist/src/index.d.ts",
@ -21,8 +21,9 @@
"@aws-sdk/client-s3": "^3.427.0",
"@dqbd/tiktoken": "^1.0.7",
"@elastic/elasticsearch": "^8.9.0",
"@getzep/zep-js": "^0.6.3",
"@gomomento/sdk": "^1.40.2",
"@getzep/zep-js": "^0.9.0",
"@gomomento/sdk": "^1.51.1",
"@gomomento/sdk-core": "^1.51.1",
"@google-ai/generativelanguage": "^0.2.1",
"@huggingface/inference": "^2.6.1",
"@notionhq/client": "^2.2.8",
@ -35,7 +36,7 @@
"@upstash/redis": "^1.22.1",
"@zilliz/milvus2-sdk-node": "^2.2.24",
"apify-client": "^2.7.1",
"axios": "^0.27.2",
"axios": "1.6.2",
"cheerio": "^1.0.0-rc.12",
"chromadb": "^1.5.11",
"cohere-ai": "^6.2.0",
@ -49,9 +50,10 @@
"html-to-text": "^9.0.5",
"husky": "^8.0.3",
"ioredis": "^5.3.2",
"langchain": "^0.0.165",
"langchain": "^0.0.196",
"langfuse": "^1.2.0",
"langfuse-langchain": "^1.0.31",
"langsmith": "^0.0.32",
"langsmith": "^0.0.49",
"linkifyjs": "^4.1.1",
"llmonitor": "^0.5.5",
"mammoth": "^1.5.1",

View File

@ -107,9 +107,12 @@ export interface INode extends INodeProperties {
search: (nodeData: INodeData, options?: ICommonObject) => Promise<any>
delete: (nodeData: INodeData, options?: ICommonObject) => Promise<void>
}
+ memoryMethods?: {
+ clearSessionMemory: (nodeData: INodeData, options?: ICommonObject) => Promise<void>
+ getChatMessages: (nodeData: INodeData, options?: ICommonObject) => Promise<string>
+ }
init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<any>
run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<string | ICommonObject>
- clearSessionMemory?(nodeData: INodeData, options?: ICommonObject): Promise<void>
}
export interface INodeData extends INodeProperties {

View File

@ -8,6 +8,10 @@ import { LLMonitorHandler } from 'langchain/callbacks/handlers/llmonitor'
import { getCredentialData, getCredentialParam } from './utils'
import { ICommonObject, INodeData } from './Interface'
import CallbackHandler from 'langfuse-langchain'
+ import { RunTree, RunTreeConfig, Client as LangsmithClient } from 'langsmith'
+ import { Langfuse, LangfuseTraceClient, LangfuseSpanClient, LangfuseGenerationClient } from 'langfuse'
+ import monitor from 'llmonitor'
+ import { v4 as uuidv4 } from 'uuid'
interface AgentRun extends Run {
actions: AgentAction[]
@ -273,3 +277,488 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO
throw new Error(e)
}
}
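The `options.analytic` string parsed in `init()` below is a JSON map keyed by provider. A hypothetical example of its shape, inferred from the fields the loop reads (ids and names are placeholders):

```ts
// Field names match what AnalyticHandler.init() reads per provider.
const analyticExample = JSON.stringify({
    langSmith: { status: true, credentialId: 'cred-1', projectName: 'my-project' },
    langFuse: { status: true, credentialId: 'cred-2', release: 'v1' },
    llmonitor: { status: false, credentialId: 'cred-3' }
})
```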
export class AnalyticHandler {
nodeData: INodeData
options: ICommonObject = {}
handlers: ICommonObject = {}
constructor(nodeData: INodeData, options: ICommonObject) {
this.options = options
this.nodeData = nodeData
this.init()
}
async init() {
try {
if (!this.options.analytic) return
const analytic = JSON.parse(this.options.analytic)
for (const provider in analytic) {
const providerStatus = analytic[provider].status as boolean
if (providerStatus) {
const credentialId = analytic[provider].credentialId as string
const credentialData = await getCredentialData(credentialId ?? '', this.options)
if (provider === 'langSmith') {
const langSmithProject = analytic[provider].projectName as string
const langSmithApiKey = getCredentialParam('langSmithApiKey', credentialData, this.nodeData)
const langSmithEndpoint = getCredentialParam('langSmithEndpoint', credentialData, this.nodeData)
const client = new LangsmithClient({
apiUrl: langSmithEndpoint ?? 'https://api.smith.langchain.com',
apiKey: langSmithApiKey
})
this.handlers['langSmith'] = { client, langSmithProject }
} else if (provider === 'langFuse') {
const release = analytic[provider].release as string
const langFuseSecretKey = getCredentialParam('langFuseSecretKey', credentialData, this.nodeData)
const langFusePublicKey = getCredentialParam('langFusePublicKey', credentialData, this.nodeData)
const langFuseEndpoint = getCredentialParam('langFuseEndpoint', credentialData, this.nodeData)
const langfuse = new Langfuse({
secretKey: langFuseSecretKey,
publicKey: langFusePublicKey,
baseUrl: langFuseEndpoint ?? 'https://cloud.langfuse.com',
release
})
this.handlers['langFuse'] = { client: langfuse }
} else if (provider === 'llmonitor') {
const llmonitorAppId = getCredentialParam('llmonitorAppId', credentialData, this.nodeData)
const llmonitorEndpoint = getCredentialParam('llmonitorEndpoint', credentialData, this.nodeData)
monitor.init({
appId: llmonitorAppId,
apiUrl: llmonitorEndpoint
})
this.handlers['llmonitor'] = { client: monitor }
}
}
}
} catch (e) {
throw new Error(e)
}
}
async onChainStart(name: string, input: string, parentIds?: ICommonObject) {
const returnIds: ICommonObject = {
langSmith: {},
langFuse: {},
llmonitor: {}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
if (!parentIds || !Object.keys(parentIds).length) {
const parentRunConfig: RunTreeConfig = {
name,
run_type: 'chain',
inputs: {
text: input
},
serialized: {},
project_name: this.handlers['langSmith'].langSmithProject,
client: this.handlers['langSmith'].client
}
const parentRun = new RunTree(parentRunConfig)
await parentRun.postRun()
this.handlers['langSmith'].chainRun = { [parentRun.id]: parentRun }
returnIds['langSmith'].chainRun = parentRun.id
} else {
const parentRun: RunTree | undefined = this.handlers['langSmith'].chainRun[parentIds['langSmith'].chainRun]
if (parentRun) {
const childChainRun = await parentRun.createChild({
name,
run_type: 'chain',
inputs: {
text: input
}
})
await childChainRun.postRun()
this.handlers['langSmith'].chainRun = { [childChainRun.id]: childChainRun }
returnIds['langSmith'].chainRun = childChainRun.id
}
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
let langfuseTraceClient: LangfuseTraceClient
if (!parentIds || !Object.keys(parentIds).length) {
const langfuse: Langfuse = this.handlers['langFuse'].client
langfuseTraceClient = langfuse.trace({
name,
userId: this.options.chatId,
metadata: { tags: ['openai-assistant'] }
})
} else {
langfuseTraceClient = this.handlers['langFuse'].trace[parentIds['langFuse'].trace]
}
if (langfuseTraceClient) {
const span = langfuseTraceClient.span({
name,
input: {
text: input
}
})
this.handlers['langFuse'].trace = { [langfuseTraceClient.id]: langfuseTraceClient }
this.handlers['langFuse'].span = { [span.id]: span }
returnIds['langFuse'].trace = langfuseTraceClient.id
returnIds['langFuse'].span = span.id
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const monitor = this.handlers['llmonitor'].client
if (monitor) {
const runId = uuidv4()
await monitor.trackEvent('chain', 'start', {
runId,
name,
userId: this.options.chatId,
input
})
this.handlers['llmonitor'].chainEvent = { [runId]: runId }
returnIds['llmonitor'].chainEvent = runId
}
}
return returnIds
}
async onChainEnd(returnIds: ICommonObject, output: string | object, shutdown = false) {
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
const chainRun: RunTree | undefined = this.handlers['langSmith'].chainRun[returnIds['langSmith'].chainRun]
if (chainRun) {
await chainRun.end({
outputs: {
output
}
})
await chainRun.patchRun()
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
const span: LangfuseSpanClient | undefined = this.handlers['langFuse'].span[returnIds['langFuse'].span]
if (span) {
span.end({
output
})
if (shutdown) {
const langfuse: Langfuse = this.handlers['langFuse'].client
await langfuse.shutdownAsync()
}
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const chainEventId = returnIds['llmonitor'].chainEvent
const monitor = this.handlers['llmonitor'].client
if (monitor && chainEventId) {
await monitor.trackEvent('chain', 'end', {
runId: chainEventId,
output
})
}
}
}
async onChainError(returnIds: ICommonObject, error: string | object, shutdown = false) {
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
const chainRun: RunTree | undefined = this.handlers['langSmith'].chainRun[returnIds['langSmith'].chainRun]
if (chainRun) {
await chainRun.end({
error: {
error
}
})
await chainRun.patchRun()
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
const span: LangfuseSpanClient | undefined = this.handlers['langFuse'].span[returnIds['langFuse'].span]
if (span) {
span.end({
output: {
error
}
})
if (shutdown) {
const langfuse: Langfuse = this.handlers['langFuse'].client
await langfuse.shutdownAsync()
}
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const chainEventId = returnIds['llmonitor'].chainEvent
const monitor = this.handlers['llmonitor'].client
if (monitor && chainEventId) {
await monitor.trackEvent('chain', 'end', {
runId: chainEventId,
output: error
})
}
}
}
async onLLMStart(name: string, input: string, parentIds: ICommonObject) {
const returnIds: ICommonObject = {
langSmith: {},
langFuse: {},
llmonitor: {}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
const parentRun: RunTree | undefined = this.handlers['langSmith'].chainRun[parentIds['langSmith'].chainRun]
if (parentRun) {
const childLLMRun = await parentRun.createChild({
name,
run_type: 'llm',
inputs: {
prompts: [input]
}
})
await childLLMRun.postRun()
this.handlers['langSmith'].llmRun = { [childLLMRun.id]: childLLMRun }
returnIds['langSmith'].llmRun = childLLMRun.id
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
const trace: LangfuseTraceClient | undefined = this.handlers['langFuse'].trace[parentIds['langFuse'].trace]
if (trace) {
const generation = trace.generation({
name,
prompt: input
})
this.handlers['langFuse'].generation = { [generation.id]: generation }
returnIds['langFuse'].generation = generation.id
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const monitor = this.handlers['llmonitor'].client
const chainEventId: string = this.handlers['llmonitor'].chainEvent[parentIds['llmonitor'].chainEvent]
if (monitor && chainEventId) {
const runId = uuidv4()
await monitor.trackEvent('llm', 'start', {
runId,
parentRunId: chainEventId,
name,
userId: this.options.chatId,
input
})
this.handlers['llmonitor'].llmEvent = { [runId]: runId }
returnIds['llmonitor'].llmEvent = runId
}
}
return returnIds
}
async onLLMEnd(returnIds: ICommonObject, output: string) {
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
const llmRun: RunTree | undefined = this.handlers['langSmith'].llmRun[returnIds['langSmith'].llmRun]
if (llmRun) {
await llmRun.end({
outputs: {
generations: [output]
}
})
await llmRun.patchRun()
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
const generation: LangfuseGenerationClient | undefined = this.handlers['langFuse'].generation[returnIds['langFuse'].generation]
if (generation) {
generation.end({
completion: output
})
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const llmEventId: string = this.handlers['llmonitor'].llmEvent[returnIds['llmonitor'].llmEvent]
const monitor = this.handlers['llmonitor'].client
if (monitor && llmEventId) {
await monitor.trackEvent('llm', 'end', {
runId: llmEventId,
output
})
}
}
}
async onLLMError(returnIds: ICommonObject, error: string | object) {
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
const llmRun: RunTree | undefined = this.handlers['langSmith'].llmRun[returnIds['langSmith'].llmRun]
if (llmRun) {
await llmRun.end({
error: {
error
}
})
await llmRun.patchRun()
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
const generation: LangfuseGenerationClient | undefined = this.handlers['langFuse'].generation[returnIds['langFuse'].generation]
if (generation) {
generation.end({
completion: error
})
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const llmEventId: string = this.handlers['llmonitor'].llmEvent[returnIds['llmonitor'].llmEvent]
const monitor = this.handlers['llmonitor'].client
if (monitor && llmEventId) {
await monitor.trackEvent('llm', 'end', {
runId: llmEventId,
output: error
})
}
}
}
async onToolStart(name: string, input: string | object, parentIds: ICommonObject) {
const returnIds: ICommonObject = {
langSmith: {},
langFuse: {},
llmonitor: {}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
const parentRun: RunTree | undefined = this.handlers['langSmith'].chainRun[parentIds['langSmith'].chainRun]
if (parentRun) {
const childToolRun = await parentRun.createChild({
name,
run_type: 'tool',
inputs: {
input
}
})
await childToolRun.postRun()
this.handlers['langSmith'].toolRun = { [childToolRun.id]: childToolRun }
returnIds['langSmith'].toolRun = childToolRun.id
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
const trace: LangfuseTraceClient | undefined = this.handlers['langFuse'].trace[parentIds['langFuse'].trace]
if (trace) {
const toolSpan = trace.span({
name,
input
})
this.handlers['langFuse'].toolSpan = { [toolSpan.id]: toolSpan }
returnIds['langFuse'].toolSpan = toolSpan.id
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const monitor = this.handlers['llmonitor'].client
const chainEventId: string = this.handlers['llmonitor'].chainEvent[parentIds['llmonitor'].chainEvent]
if (monitor && chainEventId) {
const runId = uuidv4()
await monitor.trackEvent('tool', 'start', {
runId,
parentRunId: chainEventId,
name,
userId: this.options.chatId,
input
})
this.handlers['llmonitor'].toolEvent = { [runId]: runId }
returnIds['llmonitor'].toolEvent = runId
}
}
return returnIds
}
async onToolEnd(returnIds: ICommonObject, output: string | object) {
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
const toolRun: RunTree | undefined = this.handlers['langSmith'].toolRun[returnIds['langSmith'].toolRun]
if (toolRun) {
await toolRun.end({
outputs: {
output
}
})
await toolRun.patchRun()
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
const toolSpan: LangfuseSpanClient | undefined = this.handlers['langFuse'].toolSpan[returnIds['langFuse'].toolSpan]
if (toolSpan) {
toolSpan.end({
output
})
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const toolEventId: string = this.handlers['llmonitor'].toolEvent[returnIds['llmonitor'].toolEvent]
const monitor = this.handlers['llmonitor'].client
if (monitor && toolEventId) {
await monitor.trackEvent('tool', 'end', {
runId: toolEventId,
output
})
}
}
}
async onToolError(returnIds: ICommonObject, error: string | object) {
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langSmith')) {
const toolRun: RunTree | undefined = this.handlers['langSmith'].toolRun[returnIds['langSmith'].toolRun]
if (toolRun) {
await toolRun.end({
error: {
error
}
})
await toolRun.patchRun()
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'langFuse')) {
const toolSpan: LangfuseSpanClient | undefined = this.handlers['langFuse'].toolSpan[returnIds['langFuse'].toolSpan]
if (toolSpan) {
toolSpan.end({
output: error
})
}
}
if (Object.prototype.hasOwnProperty.call(this.handlers, 'llmonitor')) {
const toolEventId: string = this.handlers['llmonitor'].toolEvent[returnIds['llmonitor'].toolEvent]
const monitor = this.handlers['llmonitor'].client
if (monitor && toolEventId) {
await monitor.trackEvent('tool', 'end', {
runId: toolEventId,
output: error
})
}
}
}
}
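The handler above threads provider-specific run IDs through every callback: each onXStart call returns a returnIds map, and passing that map back as parentIds is what nests LLM and tool runs under their parent chain run. A minimal usage sketch, assuming nodeData and options are supplied by the calling node and that options.analytic carries the JSON shape init() parses (provider key, status, credentialId, plus projectName for LangSmith); the credential ID and project name below are placeholders:

    // Sketch only: credential IDs and project names are placeholders
    const analytic = JSON.stringify({
        langSmith: { status: true, credentialId: 'my-credential-id', projectName: 'my-project' }
    })
    const handler = new AnalyticHandler(nodeData, { ...options, analytic, chatId: 'chat-1' })
    await handler.init() // init() also runs in the constructor; awaiting here avoids using handlers before they exist

    // Top-level chain run: no parentIds, so fresh traces/runs are created
    const chainIds = await handler.onChainStart('AgentExecutor', 'What is 2 + 2?')

    // Child LLM run, nested under the chain via the IDs returned above
    const llmIds = await handler.onLLMStart('ChatOpenAI', 'What is 2 + 2?', chainIds)
    await handler.onLLMEnd(llmIds, '4')

    // Close the chain; shutdown = true flushes Langfuse on the final call
    await handler.onChainEnd(chainIds, '4', true)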

View File

@ -549,6 +549,18 @@ export const convertChatHistoryToText = (chatHistory: IMessage[] = []): string =
.join('\n')
}
/**
* Serialize chat history, joining array input into a newline-delimited string
* @param {string | Array<string>} chatHistory
* @returns {string}
*/
export const serializeChatHistory = (chatHistory: string | Array<string>) => {
if (Array.isArray(chatHistory)) {
return chatHistory.join('\n')
}
return chatHistory
}
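For reference, both accepted shapes collapse to a newline-joined string; a trivial illustration:

    serializeChatHistory(['Human: hi', 'AI: hello there']) // 'Human: hi\nAI: hello there'
    serializeChatHistory('Human: hi') // returned unchanged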
/**
* Convert schema to zod schema
* @param {string | object} schema

View File

@ -3,7 +3,7 @@
"nodes": [
{
"width": 300,
"height": 510,
"height": 491,
"id": "openApiChain_1",
"position": {
"x": 1203.1825726424859,
@ -13,8 +13,8 @@
"data": {
"id": "openApiChain_1",
"label": "OpenAPI Chain",
"name": "openApiChain",
"version": 1,
"name": "openApiChain",
"type": "OpenAPIChain",
"baseClasses": ["OpenAPIChain", "BaseChain"],
"category": "Chains",
@ -78,7 +78,7 @@
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_1",
"position": {
"x": 792.3201947594027,
@ -88,8 +88,8 @@
"data": {
"id": "chatOpenAI_1",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -259,8 +259,8 @@
"data": {
"id": "chainTool_0",
"label": "Chain Tool",
"name": "chainTool",
"version": 1,
"name": "chainTool",
"type": "ChainTool",
"baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool"],
"category": "Tools",
@ -334,7 +334,7 @@
"id": "openAIFunctionAgent_0",
"label": "OpenAI Function Agent",
"name": "openAIFunctionAgent",
"version": 1,
"version": 3,
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
@ -365,9 +365,8 @@
"id": "openAIFunctionAgent_0-input-memory-BaseChatMemory"
},
{
"label": "OpenAI Chat Model",
"label": "OpenAI/Azure Chat Model",
"name": "model",
"description": "Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target=\"_blank\" href=\"https://platform.openai.com/docs/guides/gpt/function-calling\">docs</a> for more info",
"type": "BaseChatModel",
"id": "openAIFunctionAgent_0-input-model-BaseChatModel"
}
@ -398,7 +397,7 @@
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_2",
"position": {
"x": 1645.450699499575,
@ -408,8 +407,8 @@
"data": {
"id": "chatOpenAI_2",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -579,8 +578,8 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"name": "bufferMemory",
"version": 1,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
@ -658,17 +657,6 @@
"label": ""
}
},
{
"source": "chatOpenAI_2",
"sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
@ -679,6 +667,17 @@
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_2",
"sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
}
]
}

View File

@ -13,8 +13,8 @@
"data": {
"id": "getApiChain_0",
"label": "GET API Chain",
"name": "getApiChain",
"version": 1,
"name": "getApiChain",
"type": "GETApiChain",
"baseClasses": ["GETApiChain", "BaseChain", "BaseLangChain"],
"category": "Chains",
@ -102,8 +102,8 @@
"data": {
"id": "chainTool_0",
"label": "Chain Tool",
"name": "chainTool",
"version": 1,
"name": "chainTool",
"type": "ChainTool",
"baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"],
"category": "Tools",
@ -176,8 +176,8 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"name": "bufferMemory",
"version": 1,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
@ -233,8 +233,8 @@
"data": {
"id": "chainTool_1",
"label": "Chain Tool",
"name": "chainTool",
"version": 1,
"name": "chainTool",
"type": "ChainTool",
"baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"],
"category": "Tools",
@ -307,8 +307,8 @@
"data": {
"id": "postApiChain_0",
"label": "POST API Chain",
"name": "postApiChain",
"version": 1,
"name": "postApiChain",
"type": "POSTApiChain",
"baseClasses": ["POSTApiChain", "BaseChain", "BaseLangChain"],
"category": "Chains",
@ -386,7 +386,7 @@
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_2",
"position": {
"x": 572.8941615312035,
@ -396,8 +396,8 @@
"data": {
"id": "chatOpenAI_2",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -557,7 +557,7 @@
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_1",
"position": {
"x": 828.7788305309582,
@ -567,8 +567,8 @@
"data": {
"id": "chatOpenAI_1",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -728,7 +728,7 @@
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_3",
"position": {
"x": 1148.338912314111,
@ -738,8 +738,8 @@
"data": {
"id": "chatOpenAI_3",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -869,7 +869,7 @@
}
],
"inputs": {
"modelName": "gpt-3.5-turbo",
"modelName": "gpt-3.5-turbo-16k",
"temperature": 0.9,
"maxTokens": "",
"topP": "",
@ -902,17 +902,17 @@
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 2114.071431691489,
"y": 941.7926368551367
"x": 2090.570467632979,
"y": 969.5131357270544
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"version": 2,
"name": "conversationalAgent",
"version": 1,
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"baseClasses": ["AgentExecutor", "BaseChain", "Runnable"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [
@ -938,8 +938,8 @@
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalAgent_0-input-model-BaseLanguageModel"
"type": "BaseChatModel",
"id": "conversationalAgent_0-input-model-BaseChatModel"
},
{
"label": "Memory",
@ -956,21 +956,21 @@
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain",
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain"
"type": "AgentExecutor | BaseChain | Runnable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 2114.071431691489,
"y": 941.7926368551367
}
"x": 2090.570467632979,
"y": 969.5131357270544
},
"dragging": false
}
],
"edges": [
@ -1044,9 +1044,9 @@
"source": "chatOpenAI_3",
"sourceHandle": "chatOpenAI_3-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel",
"targetHandle": "conversationalAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_3-chatOpenAI_3-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel",
"id": "chatOpenAI_3-chatOpenAI_3-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}

View File

@ -13,8 +13,8 @@
"data": {
"id": "calculator_1",
"label": "Calculator",
"name": "calculator",
"version": 1,
"name": "calculator",
"type": "Calculator",
"baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain"],
"category": "Tools",
@ -52,8 +52,8 @@
"data": {
"id": "bufferMemory_1",
"label": "Buffer Memory",
"name": "bufferMemory",
"version": 1,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
@ -109,8 +109,8 @@
"data": {
"id": "serpAPI_0",
"label": "Serp API",
"name": "serpAPI",
"version": 1,
"name": "serpAPI",
"type": "SerpAPI",
"baseClasses": ["SerpAPI", "Tool", "StructuredTool"],
"category": "Tools",
@ -146,7 +146,7 @@
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_0",
"position": {
"x": 97.01321406237057,
@ -156,8 +156,8 @@
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -287,7 +287,7 @@
}
],
"inputs": {
"modelName": "gpt-3.5-turbo",
"modelName": "gpt-3.5-turbo-16k",
"temperature": 0.9,
"maxTokens": "",
"topP": "",
@ -320,17 +320,17 @@
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 1164.4550359451973,
"y": 283.40041124403075
"x": 1191.1524476753796,
"y": 324.2479396683294
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"version": 2,
"name": "conversationalAgent",
"version": 1,
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"baseClasses": ["AgentExecutor", "BaseChain", "Runnable"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [
@ -356,8 +356,8 @@
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalAgent_0-input-model-BaseLanguageModel"
"type": "BaseChatModel",
"id": "conversationalAgent_0-input-model-BaseChatModel"
},
{
"label": "Memory",
@ -374,10 +374,10 @@
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain",
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain"
"type": "AgentExecutor | BaseChain | Runnable"
}
],
"outputs": {},
@ -385,8 +385,8 @@
},
"selected": false,
"positionAbsolute": {
"x": 1164.4550359451973,
"y": 283.40041124403075
"x": 1191.1524476753796,
"y": 324.2479396683294
},
"dragging": false
}
@ -418,9 +418,9 @@
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel",
"targetHandle": "conversationalAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}

View File

@ -98,7 +98,7 @@
"data": {
"id": "conversationalRetrievalAgent_0",
"label": "Conversational Retrieval Agent",
"version": 1,
"version": 3,
"name": "conversationalRetrievalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "Runnable"],
@ -130,10 +130,10 @@
"id": "conversationalRetrievalAgent_0-input-memory-BaseChatMemory"
},
{
"label": "OpenAI Chat Model",
"label": "OpenAI/Azure Chat Model",
"name": "model",
"type": "ChatOpenAI",
"id": "conversationalRetrievalAgent_0-input-model-ChatOpenAI"
"type": "BaseChatModel",
"id": "conversationalRetrievalAgent_0-input-model-BaseChatModel"
}
],
"inputs": {
@ -642,9 +642,9 @@
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable",
"target": "conversationalRetrievalAgent_0",
"targetHandle": "conversationalRetrievalAgent_0-input-model-ChatOpenAI",
"targetHandle": "conversationalRetrievalAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-model-ChatOpenAI",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}

View File

@ -205,7 +205,7 @@
"data": {
"id": "ZepMemory_0",
"label": "Zep Memory",
"version": 1,
"version": 2,
"name": "ZepMemory",
"type": "ZepMemory",
"baseClasses": ["ZepMemory", "BaseChatMemory", "BaseMemory"],
@ -228,13 +228,6 @@
"default": "http://127.0.0.1:8000",
"id": "ZepMemory_0-input-baseURL-string"
},
{
"label": "Auto Summary",
"name": "autoSummary",
"type": "boolean",
"default": true,
"id": "ZepMemory_0-input-autoSummary-boolean"
},
{
"label": "Session Id",
"name": "sessionId",
@ -251,17 +244,10 @@
"type": "number",
"default": "10",
"step": 1,
"additionalParams": true,
"description": "Window of size k to surface the last k back-and-forths to use as memory.",
"id": "ZepMemory_0-input-k-number"
},
{
"label": "Auto Summary Template",
"name": "autoSummaryTemplate",
"type": "string",
"default": "This is the summary of the following conversation:\n{summary}",
"additionalParams": true,
"id": "ZepMemory_0-input-autoSummaryTemplate-string"
},
{
"label": "AI Prefix",
"name": "aiPrefix",
@ -306,10 +292,8 @@
"inputAnchors": [],
"inputs": {
"baseURL": "http://127.0.0.1:8000",
"autoSummary": true,
"sessionId": "",
"k": "10",
"autoSummaryTemplate": "This is the summary of the following conversation:\n{summary}",
"aiPrefix": "ai",
"humanPrefix": "human",
"memoryKey": "chat_history",

View File

@ -1127,81 +1127,6 @@
},
"dragging": false
},
{
"width": 300,
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 2506.011817109287,
"y": -241.58006840004734
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"version": 1,
"name": "conversationalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "Runnable"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [
{
"label": "System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.",
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_0-input-systemMessage-string"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalAgent_0-input-tools-Tool"
},
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalAgent_0-input-model-BaseLanguageModel"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalAgent_0-input-memory-BaseChatMemory"
}
],
"inputs": {
"tools": ["{{chainTool_2.data.instance}}", "{{chainTool_3.data.instance}}"],
"model": "{{chatOpenAI_2.data.instance}}",
"memory": "{{bufferMemory_0.data.instance}}",
"systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist."
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | Runnable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 2506.011817109287,
"y": -241.58006840004734
},
"dragging": false
},
{
"width": 300,
"height": 574,
@ -1602,6 +1527,81 @@
"y": 75.96855802341503
},
"dragging": false
},
{
"width": 300,
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 2432.125364763489,
"y": -105.27942167533908
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"version": 2,
"name": "conversationalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "Runnable"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [
{
"label": "System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.",
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_0-input-systemMessage-string"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalAgent_0-input-tools-Tool"
},
{
"label": "Language Model",
"name": "model",
"type": "BaseChatModel",
"id": "conversationalAgent_0-input-model-BaseChatModel"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalAgent_0-input-memory-BaseChatMemory"
}
],
"inputs": {
"tools": ["{{chainTool_2.data.instance}}", "{{chainTool_3.data.instance}}"],
"model": "{{chatOpenAI_2.data.instance}}",
"memory": "{{bufferMemory_0.data.instance}}",
"systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist."
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | Runnable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 2432.125364763489,
"y": -105.27942167533908
},
"dragging": false
}
],
"edges": [
@ -1704,6 +1704,28 @@
"label": ""
}
},
{
"source": "plainText_1",
"sourceHandle": "plainText_1-output-document-Document",
"target": "faiss_0",
"targetHandle": "faiss_0-input-document-Document",
"type": "buttonedge",
"id": "plainText_1-plainText_1-output-document-Document-faiss_0-faiss_0-input-document-Document",
"data": {
"label": ""
}
},
{
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable",
"target": "plainText_1",
"targetHandle": "plainText_1-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable-plainText_1-plainText_1-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
},
{
"source": "chainTool_2",
"sourceHandle": "chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain",
@ -1730,9 +1752,9 @@
"source": "chatOpenAI_2",
"sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel",
"targetHandle": "conversationalAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel",
"id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
@ -1747,28 +1769,6 @@
"data": {
"label": ""
}
},
{
"source": "plainText_1",
"sourceHandle": "plainText_1-output-document-Document",
"target": "faiss_0",
"targetHandle": "faiss_0-input-document-Document",
"type": "buttonedge",
"id": "plainText_1-plainText_1-output-document-Document-faiss_0-faiss_0-input-document-Document",
"data": {
"label": ""
}
},
{
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable",
"target": "plainText_1",
"targetHandle": "plainText_1-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable-plainText_1-plainText_1-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
}
]
}

View File

@ -13,8 +13,8 @@
"data": {
"id": "calculator_0",
"label": "Calculator",
"name": "calculator",
"version": 1,
"name": "calculator",
"type": "Calculator",
"baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain", "Serializable"],
"category": "Tools",
@ -52,8 +52,8 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"name": "bufferMemory",
"version": 1,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
@ -109,8 +109,8 @@
"data": {
"id": "customTool_0",
"label": "Custom Tool",
"name": "customTool",
"version": 1,
"name": "customTool",
"type": "CustomTool",
"baseClasses": ["CustomTool", "Tool", "StructuredTool"],
"category": "Tools",
@ -158,8 +158,8 @@
"data": {
"id": "serper_0",
"label": "Serper",
"name": "serper",
"version": 1,
"name": "serper",
"type": "Serper",
"baseClasses": ["Serper", "Tool", "StructuredTool"],
"category": "Tools",
@ -206,7 +206,7 @@
"id": "openAIFunctionAgent_0",
"label": "OpenAI Function Agent",
"name": "openAIFunctionAgent",
"version": 1,
"version": 3,
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
@ -237,9 +237,8 @@
"id": "openAIFunctionAgent_0-input-memory-BaseChatMemory"
},
{
"label": "OpenAI Chat Model",
"label": "OpenAI/Azure Chat Model",
"name": "model",
"description": "Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target=\"_blank\" href=\"https://platform.openai.com/docs/guides/gpt/function-calling\">docs</a> for more info",
"type": "BaseChatModel",
"id": "openAIFunctionAgent_0-input-model-BaseChatModel"
}
@ -270,7 +269,7 @@
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_0",
"position": {
"x": 817.8210275868742,
@ -280,8 +279,8 @@
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -474,17 +473,6 @@
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
@ -495,6 +483,17 @@
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
}
]
}

View File

@ -14,7 +14,7 @@
"data": {
"id": "openAIAssistant_0",
"label": "OpenAI Assistant",
"version": 1,
"version": 2,
"name": "openAIAssistant",
"type": "OpenAIAssistant",
"baseClasses": ["OpenAIAssistant"],
@ -27,6 +27,15 @@
"type": "asyncOptions",
"loadMethod": "listAssistants",
"id": "openAIAssistant_0-input-selectedAssistant-asyncOptions"
},
{
"label": "Disable File Download",
"name": "disableFileDownload",
"type": "boolean",
"description": "Messages can contain text, images, or files. In some cases, you may want to prevent others from downloading the files. Learn more from OpenAI File Annotation <a target=\"_blank\" href=\"https://platform.openai.com/docs/assistants/how-it-works/managing-threads-and-messages\">docs</a>",
"optional": true,
"additionalParams": true,
"id": "openAIAssistant_0-input-disableFileDownload-boolean"
}
],
"inputAnchors": [

View File

@ -13,8 +13,8 @@
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"name": "bufferMemory",
"version": 1,
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
@ -70,8 +70,8 @@
"data": {
"id": "webBrowser_0",
"label": "Web Browser",
"name": "webBrowser",
"version": 1,
"name": "webBrowser",
"type": "WebBrowser",
"baseClasses": ["WebBrowser", "Tool", "StructuredTool", "BaseLangChain"],
"category": "Tools",
@ -115,82 +115,7 @@
},
{
"width": 300,
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 1464.513303631911,
"y": 155.73036805253955
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"name": "conversationalAgent",
"version": 1,
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [
{
"label": "System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.",
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_0-input-systemMessage-string"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalAgent_0-input-tools-Tool"
},
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalAgent_0-input-model-BaseLanguageModel"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalAgent_0-input-memory-BaseChatMemory"
}
],
"inputs": {
"tools": ["{{webBrowser_0.data.instance}}"],
"model": "{{chatOpenAI_1.data.instance}}",
"memory": "{{bufferMemory_0.data.instance}}",
"systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist."
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1464.513303631911,
"y": 155.73036805253955
},
"dragging": false
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_0",
"position": {
"x": 734.7477982032904,
@ -200,8 +125,8 @@
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -371,8 +296,8 @@
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"version": 1,
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["OpenAIEmbeddings", "Embeddings"],
"category": "Embeddings",
@ -445,7 +370,7 @@
},
{
"width": 300,
"height": 523,
"height": 574,
"id": "chatOpenAI_1",
"position": {
"x": 68.312124033115,
@ -455,8 +380,8 @@
"data": {
"id": "chatOpenAI_1",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"version": 2,
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
@ -586,7 +511,7 @@
}
],
"inputs": {
"modelName": "gpt-3.5-turbo",
"modelName": "gpt-3.5-turbo-16k",
"temperature": 0.9,
"maxTokens": "",
"topP": "",
@ -613,6 +538,81 @@
"y": -239.65476709991256
},
"dragging": false
},
{
"width": 300,
"height": 383,
"id": "conversationalAgent_0",
"position": {
"x": 1518.944765840293,
"y": 212.2513364217197
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"version": 2,
"name": "conversationalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "Runnable"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [
{
"label": "System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.",
"optional": true,
"additionalParams": true,
"id": "conversationalAgent_0-input-systemMessage-string"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalAgent_0-input-tools-Tool"
},
{
"label": "Language Model",
"name": "model",
"type": "BaseChatModel",
"id": "conversationalAgent_0-input-model-BaseChatModel"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalAgent_0-input-memory-BaseChatMemory"
}
],
"inputs": {
"tools": ["{{webBrowser_0.data.instance}}"],
"model": "{{chatOpenAI_1.data.instance}}",
"memory": "{{bufferMemory_0.data.instance}}",
"systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist."
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | Runnable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1518.944765840293,
"y": 212.2513364217197
},
"dragging": false
}
],
"edges": [
@ -638,17 +638,6 @@
"label": ""
}
},
{
"source": "chatOpenAI_1",
"sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "webBrowser_0",
"sourceHandle": "webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain",
@ -660,6 +649,17 @@
"label": ""
}
},
{
"source": "chatOpenAI_1",
"sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.4.2",
"version": "1.4.5",
"description": "Flowiseai Server",
"main": "dist/index",
"types": "dist/index.d.ts",
@ -47,7 +47,7 @@
"dependencies": {
"@oclif/core": "^1.13.10",
"async-mutex": "^0.4.0",
"axios": "^0.27.2",
"axios": "1.6.2",
"cors": "^2.8.5",
"crypto-js": "^4.1.1",
"dotenv": "^16.0.0",
@ -61,6 +61,7 @@
"mysql": "^2.18.1",
"pg": "^8.11.1",
"reflect-metadata": "^0.1.13",
"sanitize-html": "^2.11.0",
"socket.io": "^4.6.1",
"sqlite3": "^5.1.6",
"typeorm": "^0.3.6",
@ -71,6 +72,7 @@
"@types/cors": "^2.8.12",
"@types/crypto-js": "^4.1.1",
"@types/multer": "^1.4.7",
"@types/sanitize-html": "^2.9.5",
"concurrently": "^7.1.0",
"nodemon": "^2.0.15",
"oclif": "^3",

View File

@ -16,7 +16,7 @@ export class ChatflowPool {
* @param {IReactFlowNode[]} startingNodes
* @param {ICommonObject} overrideConfig
*/
add(chatflowid: string, endingNodeData: INodeData, startingNodes: IReactFlowNode[], overrideConfig?: ICommonObject) {
add(chatflowid: string, endingNodeData: INodeData | undefined, startingNodes: IReactFlowNode[], overrideConfig?: ICommonObject) {
this.activeChatflows[chatflowid] = {
startingNodes,
endingNodeData,

View File

@ -172,7 +172,7 @@ export interface IncomingInput {
export interface IActiveChatflows {
[key: string]: {
startingNodes: IReactFlowNode[]
endingNodeData: INodeData
endingNodeData?: INodeData
inSync: boolean
overrideConfig?: ICommonObject
}
@ -190,12 +190,6 @@ export interface IOverrideConfig {
type: string
}
export interface IDatabaseExport {
chatmessages: IChatMessage[]
chatflows: IChatFlow[]
apikeys: ICommonObject[]
}
export type ICredentialDataDecrypted = ICommonObject
// Plain credential object sent to server

View File

@ -17,7 +17,6 @@ import {
IReactFlowNode,
IReactFlowObject,
INodeData,
IDatabaseExport,
ICredentialReturnResponse,
chatType,
IChatMessage,
@ -31,18 +30,11 @@ import {
constructGraphs,
resolveVariables,
isStartNodeDependOnInput,
getAPIKeys,
addAPIKey,
updateAPIKey,
deleteAPIKey,
compareKeys,
mapMimeTypeToInputField,
findAvailableConfigs,
isSameOverrideConfig,
replaceAllAPIKeys,
isFlowValidForStream,
databaseEntities,
getApiKey,
transformToCredentialEntity,
decryptCredentialData,
clearAllSessionMemory,
@ -50,7 +42,8 @@ import {
getEncryptionKey,
checkMemorySessionId,
clearSessionMemoryFromViewMessageDialog,
getUserHome
getUserHome,
replaceChatHistory
} from './utils'
import { cloneDeep, omit, uniqWith, isEqual } from 'lodash'
import { getDataSource } from './DataSource'
@ -62,8 +55,13 @@ import { Tool } from './database/entities/Tool'
import { Assistant } from './database/entities/Assistant'
import { ChatflowPool } from './ChatflowPool'
import { CachePool } from './CachePool'
import { ICommonObject, INodeOptionsValue } from 'flowise-components'
import { ICommonObject, IMessage, INodeOptionsValue } from 'flowise-components'
import { createRateLimiter, getRateLimiter, initializeRateLimiter } from './utils/rateLimit'
import { addAPIKey, compareKeys, deleteAPIKey, getApiKey, getAPIKeys, updateAPIKey } from './utils/apiKey'
import { sanitizeMiddleware } from './utils/XSS'
import axios from 'axios'
import { Client } from 'langchainhub'
import { parsePrompt } from './utils/hub'
export class App {
app: express.Application
@ -121,9 +119,15 @@ export class App {
// Allow access from *
this.app.use(cors())
// Switch off the default 'X-Powered-By: Express' header
this.app.disable('x-powered-by')
// Add the expressRequestLogger middleware to log all requests
this.app.use(expressRequestLogger)
// Add the sanitizeMiddleware to guard against XSS
this.app.use(sanitizeMiddleware)
if (process.env.FLOWISE_USERNAME && process.env.FLOWISE_PASSWORD) {
const username = process.env.FLOWISE_USERNAME
const password = process.env.FLOWISE_PASSWORD
@ -196,7 +200,7 @@ export class App {
// Get component credential via name
this.app.get('/api/v1/components-credentials/:name', (req: Request, res: Response) => {
if (!req.params.name.includes('&')) {
if (!req.params.name.includes('&amp;')) {
if (Object.prototype.hasOwnProperty.call(this.nodesPool.componentCredentials, req.params.name)) {
return res.json(this.nodesPool.componentCredentials[req.params.name])
} else {
@ -204,7 +208,7 @@ export class App {
}
} else {
const returnResponse = []
for (const name of req.params.name.split('&')) {
for (const name of req.params.name.split('&amp;')) {
if (Object.prototype.hasOwnProperty.call(this.nodesPool.componentCredentials, name)) {
returnResponse.push(this.nodesPool.componentCredentials[name])
} else {
@ -970,6 +974,12 @@ export class App {
// Download file from assistant
this.app.post('/api/v1/openai-assistants-file', async (req: Request, res: Response) => {
// Reject traversal in the raw file name: path.join() normalizes '..' segments away, so this must be checked before joining
if (req.body.fileName.includes('..')) return res.status(500).send(`Invalid file path`)
const filePath = path.join(getUserHome(), '.flowise', 'openai-assistant', req.body.fileName)
// Raise error if the resulting path is not absolute
if (!path.isAbsolute(filePath)) return res.status(500).send(`Invalid file path`)
// Only return files from the .flowise openai-assistant folder
if (!(filePath.includes('.flowise') && filePath.includes('openai-assistant'))) return res.status(500).send(`Invalid file path`)
res.setHeader('Content-Disposition', 'attachment; filename=' + path.basename(filePath))
const fileStream = fs.createReadStream(filePath)
fileStream.pipe(res)
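An alternative containment check, sketched here as a suggestion rather than the route's actual logic, resolves the candidate path against the allowed base folder and verifies the result stays inside it, which also rejects traversal attempts that survive normalization:

    // Sketch: canonical resolve-and-compare containment check
    const baseDir = path.join(getUserHome(), '.flowise', 'openai-assistant')
    const resolvedPath = path.resolve(baseDir, req.body.fileName)
    if (!resolvedPath.startsWith(baseDir + path.sep)) return res.status(500).send(`Invalid file path`)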
@ -1025,57 +1035,6 @@ export class App {
}
})
// ----------------------------------------
// Export Load Chatflow & ChatMessage & Apikeys
// ----------------------------------------
this.app.get('/api/v1/database/export', async (req: Request, res: Response) => {
const chatmessages = await this.AppDataSource.getRepository(ChatMessage).find()
const chatflows = await this.AppDataSource.getRepository(ChatFlow).find()
const apikeys = await getAPIKeys()
const result: IDatabaseExport = {
chatmessages,
chatflows,
apikeys
}
return res.json(result)
})
this.app.post('/api/v1/database/load', async (req: Request, res: Response) => {
const databaseItems: IDatabaseExport = req.body
await this.AppDataSource.getRepository(ChatFlow).delete({})
await this.AppDataSource.getRepository(ChatMessage).delete({})
let error = ''
// Get a new query runner instance
const queryRunner = this.AppDataSource.createQueryRunner()
// Start a new transaction
await queryRunner.startTransaction()
try {
const chatflows: ChatFlow[] = databaseItems.chatflows
const chatmessages: ChatMessage[] = databaseItems.chatmessages
await queryRunner.manager.insert(ChatFlow, chatflows)
await queryRunner.manager.insert(ChatMessage, chatmessages)
await queryRunner.commitTransaction()
} catch (err: any) {
error = err?.message ?? 'Error loading database'
await queryRunner.rollbackTransaction()
} finally {
await queryRunner.release()
}
await replaceAllAPIKeys(databaseItems.apikeys)
if (error) return res.status(500).send(error)
return res.status(201).send('OK')
})
// ----------------------------------------
// Upsert
// ----------------------------------------
@ -1093,6 +1052,35 @@ export class App {
await this.buildChatflow(req, res, undefined, true, true)
})
// ----------------------------------------
// Prompt from Hub
// ----------------------------------------
this.app.post('/api/v1/load-prompt', async (req: Request, res: Response) => {
try {
let hub = new Client()
const prompt = await hub.pull(req.body.promptName)
const templates = parsePrompt(prompt)
return res.json({ status: 'OK', prompt: req.body.promptName, templates: templates })
} catch (e: any) {
return res.json({ status: 'ERROR', prompt: req.body.promptName, error: e?.message })
}
})
this.app.post('/api/v1/prompts-list', async (req: Request, res: Response) => {
try {
// The client sends tags already suffixed with '&', e.g. 'ChatPromptTemplate&'
const tags = req.body.tags ? `tags=${req.body.tags}` : ''
// Default to 100, TODO: add pagination and use offset & limit
const url = `https://api.hub.langchain.com/repos/?limit=100&${tags}has_commits=true&sort_field=num_likes&sort_direction=desc&is_archived=false`
// Await the request so failures are caught below and a response is always sent
const response = await axios.get(url)
return res.json({ status: 'OK', repos: response.data.repos ?? [] })
} catch (e: any) {
return res.json({ status: 'ERROR', repos: [] })
}
})
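Taken together, the two new endpoints let a client list LangChain Hub repos and then pull a single prompt down. A minimal sketch of calling them (base URL assumed; this mirrors what the Langchain Hub dialog in the UI does):

import axios from 'axios'

async function pickFirstPrompt() {
    const base = 'http://localhost:3000/api/v1'
    const { data: list } = await axios.post(`${base}/prompts-list`, { tags: 'ChatPromptTemplate&' })
    if (!list.repos?.length) return
    const { data } = await axios.post(`${base}/load-prompt`, { promptName: list.repos[0].full_name })
    console.log(data.templates) // the array produced by parsePrompt (see utils/hub)
}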
// ----------------------------------------
// Prediction
// ----------------------------------------
@ -1325,14 +1313,14 @@ export class App {
* @param {IReactFlowEdge[]} edges
* @returns {string | undefined}
*/
findMemoryLabel(nodes: IReactFlowNode[], edges: IReactFlowEdge[]): string | undefined {
findMemoryLabel(nodes: IReactFlowNode[], edges: IReactFlowEdge[]): IReactFlowNode | undefined {
const memoryNodes = nodes.filter((node) => node.data.category === 'Memory')
const memoryNodeIds = memoryNodes.map((mem) => mem.data.id)
for (const edge of edges) {
if (memoryNodeIds.includes(edge.source)) {
const memoryNode = nodes.find((node) => node.data.id === edge.source)
return memoryNode ? memoryNode.data.label : undefined
return memoryNode
}
}
return undefined
@ -1398,16 +1386,19 @@ export class App {
const nodes = parsedFlowData.nodes
const edges = parsedFlowData.edges
/* Reuse the flow without having to rebuild (to avoid duplicated upsert, recomputation) when all these conditions met:
/* Reuse the flow without having to rebuild (to avoid duplicated upsert, recomputation, reinitialization of memory) when all these conditions met:
* - Node Data already exists in pool
* - Still in sync (i.e. the flow has not been modified since)
* - Existing overrideConfig and new overrideConfig are the same
* - Flow doesn't start with/contain nodes that depend on incomingInput.question
* - It's not an Upsert request
* TODO: convert overrideConfig to hash when we no longer store base64 string but filepath
***/
const isFlowReusable = () => {
return (
Object.prototype.hasOwnProperty.call(this.chatflowPool.activeChatflows, chatflowid) &&
this.chatflowPool.activeChatflows[chatflowid].inSync &&
this.chatflowPool.activeChatflows[chatflowid].endingNodeData &&
isSameOverrideConfig(
isInternal,
this.chatflowPool.activeChatflows[chatflowid].overrideConfig,
@ -1419,7 +1410,7 @@ export class App {
}
if (isFlowReusable()) {
nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData
nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData as INodeData
isStreamValid = isFlowValidForStream(nodes, nodeToExecuteData)
logger.debug(
`[server]: Reuse existing chatflow ${chatflowid} with ending node ${nodeToExecuteData.label} (${nodeToExecuteData.id})`
@ -1453,10 +1444,24 @@ export class App {
isStreamValid = isFlowValidForStream(nodes, endingNodeData)
let chatHistory: IMessage[] | string = incomingInput.history
if (
endingNodeData.inputs?.memory &&
!incomingInput.history &&
(incomingInput.chatId || incomingInput.overrideConfig?.sessionId)
) {
const memoryNodeId = endingNodeData.inputs?.memory.split('.')[0].replace('{{', '')
const memoryNode = nodes.find((node) => node.data.id === memoryNodeId)
if (memoryNode) {
chatHistory = await replaceChatHistory(memoryNode, incomingInput, this.AppDataSource, databaseEntities, logger)
}
}
/*** Get Starting Nodes with Non-Directed Graph ***/
const constructedObj = constructGraphs(nodes, edges, true)
const nonDirectedGraph = constructedObj.graph
const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId)
const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.id))
logger.debug(`[server]: Start building chatflow ${chatflowid}`)
/*** BFS to traverse from Starting Nodes to Ending Node ***/
@ -1467,7 +1472,7 @@ export class App {
depthQueue,
this.nodesPool.componentNodes,
incomingInput.question,
incomingInput.history,
chatHistory,
chatId,
chatflowid,
this.AppDataSource,
@ -1476,22 +1481,26 @@ export class App {
isUpsert,
incomingInput.stopNodeId
)
if (isUpsert) return res.status(201).send('Successfully Upserted')
if (isUpsert) {
this.chatflowPool.add(chatflowid, undefined, startingNodes, incomingInput?.overrideConfig)
return res.status(201).send('Successfully Upserted')
}
const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId)
if (!nodeToExecute) return res.status(404).send(`Node ${endingNodeId} not found`)
if (incomingInput.overrideConfig)
if (incomingInput.overrideConfig) {
nodeToExecute.data = replaceInputsWithConfig(nodeToExecute.data, incomingInput.overrideConfig)
}
const reactFlowNodeData: INodeData = resolveVariables(
nodeToExecute.data,
reactFlowNodes,
incomingInput.question,
incomingInput.history
chatHistory
)
nodeToExecuteData = reactFlowNodeData
const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.id))
this.chatflowPool.add(chatflowid, nodeToExecuteData, startingNodes, incomingInput?.overrideConfig)
}
@ -1504,11 +1513,18 @@ export class App {
let sessionId = undefined
if (nodeToExecuteData.instance) sessionId = checkMemorySessionId(nodeToExecuteData.instance, chatId)
const memoryType = this.findMemoryLabel(nodes, edges)
const memoryNode = this.findMemoryLabel(nodes, edges)
const memoryType = memoryNode?.data.label
let chatHistory: IMessage[] | string = incomingInput.history
if (memoryNode && !incomingInput.history && (incomingInput.chatId || incomingInput.overrideConfig?.sessionId)) {
chatHistory = await replaceChatHistory(memoryNode, incomingInput, this.AppDataSource, databaseEntities, logger)
}
let result = isStreamValid
? await nodeInstance.run(nodeToExecuteData, incomingInput.question, {
chatHistory: incomingInput.history,
chatflowid,
chatHistory,
socketIO,
socketIOClientId: incomingInput.socketIOClientId,
logger,
@ -1518,7 +1534,8 @@ export class App {
chatId
})
: await nodeInstance.run(nodeToExecuteData, incomingInput.question, {
chatHistory: incomingInput.history,
chatflowid,
chatHistory,
logger,
appDataSource: this.AppDataSource,
databaseEntities,

View File

@ -0,0 +1,20 @@
import { Request, Response, NextFunction } from 'express'
import sanitizeHtml from 'sanitize-html'
export function sanitizeMiddleware(req: Request, res: Response, next: NextFunction): void {
// decoding is necessary as the url is encoded by the browser
const decodedURI = decodeURI(req.url)
req.url = sanitizeHtml(decodedURI)
for (let p in req.query) {
if (Array.isArray(req.query[p])) {
const sanitizedQ = []
for (const q of req.query[p] as string[]) {
sanitizedQ.push(sanitizeHtml(q))
}
req.query[p] = sanitizedQ
} else {
req.query[p] = sanitizeHtml(req.query[p] as string)
}
}
next()
}
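A small sketch (not part of the commit) of the middleware's observable effect, assuming sanitize-html's defaults:

import express from 'express'
import { sanitizeMiddleware } from './utils/XSS'

const app = express()
app.use(sanitizeMiddleware)
app.get('/echo', (req, res) => res.json(req.query))

// GET /echo?name=<script>alert(1)</script>hello
// <script> is a non-text tag under the defaults, so both the tag and its
// contents are discarded and the handler sees req.query.name === 'hello'.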

View File

@ -0,0 +1,147 @@
import { randomBytes, scryptSync, timingSafeEqual } from 'crypto'
import { ICommonObject } from 'flowise-components'
import moment from 'moment'
import fs from 'fs'
import path from 'path'
import logger from './logger'
/**
* Returns the api key path
* @returns {string}
*/
export const getAPIKeyPath = (): string => {
return process.env.APIKEY_PATH ? path.join(process.env.APIKEY_PATH, 'api.json') : path.join(__dirname, '..', '..', 'api.json')
}
/**
* Generate the api key
* @returns {string}
*/
export const generateAPIKey = (): string => {
const buffer = randomBytes(32)
return buffer.toString('base64')
}
/**
 * Generate the secret hash for an API key
* @param {string} apiKey
* @returns {string}
*/
export const generateSecretHash = (apiKey: string): string => {
const salt = randomBytes(8).toString('hex')
const buffer = scryptSync(apiKey, salt, 64) as Buffer
return `${buffer.toString('hex')}.${salt}`
}
/**
* Verify valid keys
* @param {string} storedKey
* @param {string} suppliedKey
* @returns {boolean}
*/
export const compareKeys = (storedKey: string, suppliedKey: string): boolean => {
const [hashedPassword, salt] = storedKey.split('.')
const buffer = scryptSync(suppliedKey, salt, 64) as Buffer
return timingSafeEqual(Buffer.from(hashedPassword, 'hex'), buffer)
}
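An illustrative round-trip over the helpers above (import path assumed): only the scrypt hash and its salt are persisted, and verification runs in constant time via timingSafeEqual.

import { generateAPIKey, generateSecretHash, compareKeys } from './utils/apiKey'

const apiKey = generateAPIKey()           // 32 random bytes, base64-encoded
const stored = generateSecretHash(apiKey) // '<scrypt-hash-hex>.<salt-hex>'

compareKeys(stored, apiKey)  // true
compareKeys(stored, 'wrong') // false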
/**
* Get API keys
* @returns {Promise<ICommonObject[]>}
*/
export const getAPIKeys = async (): Promise<ICommonObject[]> => {
try {
const content = await fs.promises.readFile(getAPIKeyPath(), 'utf8')
return JSON.parse(content)
} catch (error) {
const keyName = 'DefaultKey'
const apiKey = generateAPIKey()
const apiSecret = generateSecretHash(apiKey)
const content = [
{
keyName,
apiKey,
apiSecret,
createdAt: moment().format('DD-MMM-YY'),
id: randomBytes(16).toString('hex')
}
]
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8')
return content
}
}
/**
* Add new API key
* @param {string} keyName
* @returns {Promise<ICommonObject[]>}
*/
export const addAPIKey = async (keyName: string): Promise<ICommonObject[]> => {
const existingAPIKeys = await getAPIKeys()
const apiKey = generateAPIKey()
const apiSecret = generateSecretHash(apiKey)
const content = [
...existingAPIKeys,
{
keyName,
apiKey,
apiSecret,
createdAt: moment().format('DD-MMM-YY'),
id: randomBytes(16).toString('hex')
}
]
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8')
return content
}
/**
* Get API Key details
* @param {string} apiKey
 * @returns {Promise<ICommonObject | undefined>}
*/
export const getApiKey = async (apiKey: string) => {
const existingAPIKeys = await getAPIKeys()
const keyIndex = existingAPIKeys.findIndex((key) => key.apiKey === apiKey)
if (keyIndex < 0) return undefined
return existingAPIKeys[keyIndex]
}
/**
* Update existing API key
* @param {string} keyIdToUpdate
* @param {string} newKeyName
* @returns {Promise<ICommonObject[]>}
*/
export const updateAPIKey = async (keyIdToUpdate: string, newKeyName: string): Promise<ICommonObject[]> => {
const existingAPIKeys = await getAPIKeys()
const keyIndex = existingAPIKeys.findIndex((key) => key.id === keyIdToUpdate)
if (keyIndex < 0) return []
existingAPIKeys[keyIndex].keyName = newKeyName
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(existingAPIKeys), 'utf8')
return existingAPIKeys
}
/**
* Delete API key
* @param {string} keyIdToDelete
* @returns {Promise<ICommonObject[]>}
*/
export const deleteAPIKey = async (keyIdToDelete: string): Promise<ICommonObject[]> => {
const existingAPIKeys = await getAPIKeys()
const result = existingAPIKeys.filter((key) => key.id !== keyIdToDelete)
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(result), 'utf8')
return result
}
/**
* Replace all api keys
* @param {ICommonObject[]} content
* @returns {Promise<void>}
*/
export const replaceAllAPIKeys = async (content: ICommonObject[]): Promise<void> => {
try {
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8')
} catch (error) {
logger.error(error)
}
}

View File

@ -0,0 +1,36 @@
export function parsePrompt(prompt: string): any[] {
const promptObj = JSON.parse(prompt)
let response = []
if (promptObj.kwargs.messages) {
promptObj.kwargs.messages.forEach((message: any) => {
let messageType = message.id.includes('SystemMessagePromptTemplate')
? 'systemMessagePrompt'
: message.id.includes('HumanMessagePromptTemplate')
? 'humanMessagePrompt'
: message.id.includes('AIMessagePromptTemplate')
? 'aiMessagePrompt'
: 'template'
let messageTypeDisplay = message.id.includes('SystemMessagePromptTemplate')
? 'System Message'
: message.id.includes('HumanMessagePromptTemplate')
? 'Human Message'
: message.id.includes('AIMessagePromptTemplate')
? 'AI Message'
: 'Message'
let template = message.kwargs.prompt.kwargs.template
response.push({
type: messageType,
typeDisplay: messageTypeDisplay,
template: template
})
})
} else if (promptObj.kwargs.template) {
let template = promptObj.kwargs.template
response.push({
type: 'template',
typeDisplay: 'Prompt',
template: template
})
}
return response
}
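For reference, a sketch of the serialized shape the parser expects, with field names inferred from the property accesses above (LangChain serializes the message class path into `id`, so `Array.prototype.includes` matches on the class name):

const serialized = JSON.stringify({
    kwargs: {
        messages: [
            {
                id: ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'],
                kwargs: { prompt: { kwargs: { template: 'You are a helpful assistant.' } } }
            },
            {
                id: ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'],
                kwargs: { prompt: { kwargs: { template: '{question}' } } }
            }
        ]
    }
})

parsePrompt(serialized)
// => [ { type: 'systemMessagePrompt', typeDisplay: 'System Message', template: 'You are a helpful assistant.' },
//      { type: 'humanMessagePrompt', typeDisplay: 'Human Message', template: '{question}' } ]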

View File

@ -1,33 +1,33 @@
import path from 'path'
import fs from 'fs'
import moment from 'moment'
import logger from './logger'
import {
IComponentCredentials,
IComponentNodes,
ICredentialDataDecrypted,
ICredentialReqBody,
IDepthQueue,
IExploredNode,
INodeData,
INodeDependencies,
INodeDirectedGraph,
INodeQueue,
IOverrideConfig,
IReactFlowEdge,
IReactFlowNode,
IVariableDict,
INodeData,
IOverrideConfig,
ICredentialDataDecrypted,
IComponentCredentials,
ICredentialReqBody
IncomingInput
} from '../Interface'
import { cloneDeep, get, isEqual } from 'lodash'
import {
ICommonObject,
convertChatHistoryToText,
getInputVariables,
IDatabaseEntity,
handleEscapeCharacters,
IMessage,
convertChatHistoryToText
ICommonObject,
IDatabaseEntity,
IMessage
} from 'flowise-components'
import { scryptSync, randomBytes, timingSafeEqual } from 'crypto'
import { randomBytes } from 'crypto'
import { AES, enc } from 'crypto-js'
import { ChatFlow } from '../database/entities/ChatFlow'
@ -217,7 +217,7 @@ export const buildLangchain = async (
depthQueue: IDepthQueue,
componentNodes: IComponentNodes,
question: string,
chatHistory: IMessage[],
chatHistory: IMessage[] | string,
chatId: string,
chatflowid: string,
appDataSource: DataSource,
@ -348,8 +348,8 @@ export const clearAllSessionMemory = async (
node.data.inputs.sessionId = sessionId
}
if (newNodeInstance.clearSessionMemory) {
await newNodeInstance?.clearSessionMemory(node.data, { chatId, appDataSource, databaseEntities, logger })
if (newNodeInstance.memoryMethods && newNodeInstance.memoryMethods.clearSessionMemory) {
await newNodeInstance.memoryMethods.clearSessionMemory(node.data, { chatId, appDataSource, databaseEntities, logger })
}
}
}
@ -381,8 +381,8 @@ export const clearSessionMemoryFromViewMessageDialog = async (
if (sessionId && node.data.inputs) node.data.inputs.sessionId = sessionId
if (newNodeInstance.clearSessionMemory) {
await newNodeInstance?.clearSessionMemory(node.data, { chatId, appDataSource, databaseEntities, logger })
if (newNodeInstance.memoryMethods && newNodeInstance.memoryMethods.clearSessionMemory) {
await newNodeInstance.memoryMethods.clearSessionMemory(node.data, { chatId, appDataSource, databaseEntities, logger })
return
}
}
@ -400,7 +400,7 @@ export const getVariableValue = (
paramValue: string,
reactFlowNodes: IReactFlowNode[],
question: string,
chatHistory: IMessage[],
chatHistory: IMessage[] | string,
isAcceptVariable = false
) => {
let returnVal = paramValue
@ -433,7 +433,10 @@ export const getVariableValue = (
}
if (isAcceptVariable && variableFullPath === CHAT_HISTORY_VAR_PREFIX) {
variableDict[`{{${variableFullPath}}}`] = handleEscapeCharacters(convertChatHistoryToText(chatHistory), false)
variableDict[`{{${variableFullPath}}}`] = handleEscapeCharacters(
typeof chatHistory === 'string' ? chatHistory : convertChatHistoryToText(chatHistory),
false
)
}
// Split by first occurrence of '.' to get just nodeId
@ -476,7 +479,7 @@ export const resolveVariables = (
reactFlowNodeData: INodeData,
reactFlowNodes: IReactFlowNode[],
question: string,
chatHistory: IMessage[]
chatHistory: IMessage[] | string
): INodeData => {
let flowNodeData = cloneDeep(reactFlowNodeData)
const types = 'inputs'
@ -555,9 +558,20 @@ export const isStartNodeDependOnInput = (startingNodes: IReactFlowNode[], nodes:
if (inputVariables.length > 0) return true
}
}
const whitelistNodeNames = ['vectorStoreToDocument', 'autoGPT']
const whitelistNodeNames = ['vectorStoreToDocument', 'autoGPT', 'chatPromptTemplate', 'promptTemplate'] // If these nodes are found, the chatflow cannot be reused
for (const node of nodes) {
if (whitelistNodeNames.includes(node.data.name)) return true
if (node.data.name === 'chatPromptTemplate' || node.data.name === 'promptTemplate') {
let promptValues: ICommonObject = {}
const promptValuesRaw = node.data.inputs?.promptValues
if (promptValuesRaw) {
try {
promptValues = typeof promptValuesRaw === 'object' ? promptValuesRaw : JSON.parse(promptValuesRaw)
} catch (exception) {
console.error(exception)
}
}
if (getAllValuesFromJson(promptValues).includes(`{{${QUESTION_VAR_PREFIX}}}`)) return true
} else if (whitelistNodeNames.includes(node.data.name)) return true
}
return false
}
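The new branch above walks a prompt node's promptValues and forces a rebuild when any leaf interpolates the live question. A sketch, assuming QUESTION_VAR_PREFIX === 'question':

const promptValues = { context: 'some static text', input: '{{question}}' }
getAllValuesFromJson(promptValues).includes('{{question}}') // true -> flow must be rebuilt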
@ -593,147 +607,6 @@ export const isSameOverrideConfig = (
return false
}
/**
* Returns the api key path
* @returns {string}
*/
export const getAPIKeyPath = (): string => {
return process.env.APIKEY_PATH ? path.join(process.env.APIKEY_PATH, 'api.json') : path.join(__dirname, '..', '..', 'api.json')
}
/**
* Generate the api key
* @returns {string}
*/
export const generateAPIKey = (): string => {
const buffer = randomBytes(32)
return buffer.toString('base64')
}
/**
* Generate the secret key
* @param {string} apiKey
* @returns {string}
*/
export const generateSecretHash = (apiKey: string): string => {
const salt = randomBytes(8).toString('hex')
const buffer = scryptSync(apiKey, salt, 64) as Buffer
return `${buffer.toString('hex')}.${salt}`
}
/**
* Verify valid keys
* @param {string} storedKey
* @param {string} suppliedKey
* @returns {boolean}
*/
export const compareKeys = (storedKey: string, suppliedKey: string): boolean => {
const [hashedPassword, salt] = storedKey.split('.')
const buffer = scryptSync(suppliedKey, salt, 64) as Buffer
return timingSafeEqual(Buffer.from(hashedPassword, 'hex'), buffer)
}
/**
* Get API keys
* @returns {Promise<ICommonObject[]>}
*/
export const getAPIKeys = async (): Promise<ICommonObject[]> => {
try {
const content = await fs.promises.readFile(getAPIKeyPath(), 'utf8')
return JSON.parse(content)
} catch (error) {
const keyName = 'DefaultKey'
const apiKey = generateAPIKey()
const apiSecret = generateSecretHash(apiKey)
const content = [
{
keyName,
apiKey,
apiSecret,
createdAt: moment().format('DD-MMM-YY'),
id: randomBytes(16).toString('hex')
}
]
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8')
return content
}
}
/**
* Add new API key
* @param {string} keyName
* @returns {Promise<ICommonObject[]>}
*/
export const addAPIKey = async (keyName: string): Promise<ICommonObject[]> => {
const existingAPIKeys = await getAPIKeys()
const apiKey = generateAPIKey()
const apiSecret = generateSecretHash(apiKey)
const content = [
...existingAPIKeys,
{
keyName,
apiKey,
apiSecret,
createdAt: moment().format('DD-MMM-YY'),
id: randomBytes(16).toString('hex')
}
]
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8')
return content
}
/**
* Get API Key details
* @param {string} apiKey
* @returns {Promise<ICommonObject[]>}
*/
export const getApiKey = async (apiKey: string) => {
const existingAPIKeys = await getAPIKeys()
const keyIndex = existingAPIKeys.findIndex((key) => key.apiKey === apiKey)
if (keyIndex < 0) return undefined
return existingAPIKeys[keyIndex]
}
/**
* Update existing API key
* @param {string} keyIdToUpdate
* @param {string} newKeyName
* @returns {Promise<ICommonObject[]>}
*/
export const updateAPIKey = async (keyIdToUpdate: string, newKeyName: string): Promise<ICommonObject[]> => {
const existingAPIKeys = await getAPIKeys()
const keyIndex = existingAPIKeys.findIndex((key) => key.id === keyIdToUpdate)
if (keyIndex < 0) return []
existingAPIKeys[keyIndex].keyName = newKeyName
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(existingAPIKeys), 'utf8')
return existingAPIKeys
}
/**
* Delete API key
* @param {string} keyIdToDelete
* @returns {Promise<ICommonObject[]>}
*/
export const deleteAPIKey = async (keyIdToDelete: string): Promise<ICommonObject[]> => {
const existingAPIKeys = await getAPIKeys()
const result = existingAPIKeys.filter((key) => key.id !== keyIdToDelete)
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(result), 'utf8')
return result
}
/**
* Replace all api keys
* @param {ICommonObject[]} content
* @returns {Promise<void>}
*/
export const replaceAllAPIKeys = async (content: ICommonObject[]): Promise<void> => {
try {
await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8')
} catch (error) {
logger.error(error)
}
}
/**
* Map MimeType to InputField
* @param {string} mimeType
@ -844,7 +717,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component
*/
export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNodeData: INodeData) => {
const streamAvailableLLMs = {
'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama'],
'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock'],
LLMs: ['azureOpenAI', 'openAI', 'ollama']
}
@ -1015,3 +888,67 @@ export const checkMemorySessionId = (instance: any, chatId: string): string | un
return instance.memory.chatHistory.sessionId
return undefined
}
/**
* Replace chatHistory if incomingInput.history is empty and sessionId/chatId is provided
* @param {IReactFlowNode} memoryNode
* @param {IncomingInput} incomingInput
* @param {DataSource} appDataSource
* @param {IDatabaseEntity} databaseEntities
* @param {any} logger
 * @returns {Promise<string>}
*/
export const replaceChatHistory = async (
memoryNode: IReactFlowNode,
incomingInput: IncomingInput,
appDataSource: DataSource,
databaseEntities: IDatabaseEntity,
logger: any
): Promise<string> => {
const nodeInstanceFilePath = memoryNode.data.filePath as string
const nodeModule = await import(nodeInstanceFilePath)
const newNodeInstance = new nodeModule.nodeClass()
if (incomingInput.overrideConfig?.sessionId && memoryNode.data.inputs) {
memoryNode.data.inputs.sessionId = incomingInput.overrideConfig.sessionId
}
if (newNodeInstance.memoryMethods && newNodeInstance.memoryMethods.getChatMessages) {
return await newNodeInstance.memoryMethods.getChatMessages(memoryNode.data, {
chatId: incomingInput.chatId,
appDataSource,
databaseEntities,
logger
})
}
return ''
}
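Written out, this is the contract replaceChatHistory (and the clearSessionMemory calls above) assumes on a memory node instance; the concrete memory nodes in flowise-components are expected to expose this shape (option fields beyond those passed above are assumptions):

interface MemoryMethods {
    // Returns the stored conversation for the resolved chatId/sessionId
    getChatMessages(nodeData: INodeData, options: ICommonObject): Promise<string>
    // Wipes the stored conversation for the resolved chatId/sessionId
    clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void>
}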
/**
* Get all values from a JSON object
* @param {any} obj
* @returns {any[]}
*/
export const getAllValuesFromJson = (obj: any): any[] => {
const values: any[] = []
function extractValues(data: any) {
if (typeof data === 'object' && data !== null) {
if (Array.isArray(data)) {
for (const item of data) {
extractValues(item)
}
} else {
for (const key in data) {
extractValues(data[key])
}
}
} else {
values.push(data)
}
}
extractValues(obj)
return values
}
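Example of the leaf-value walk (note that null counts as a leaf here, since the guard checks `data !== null`):

getAllValuesFromJson({ a: 1, b: { c: ['x', 'y'] }, d: null })
// => [1, 'x', 'y', null]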

View File

@ -1,6 +1,6 @@
{
"name": "flowise-ui",
"version": "1.4.0",
"version": "1.4.3",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://flowiseai.com",
"author": {

View File

@ -1,9 +0,0 @@
import client from './client'
const getExportDatabase = () => client.get('/database/export')
const createLoadDatabase = (body) => client.post('/database/load', body)
export default {
getExportDatabase,
createLoadDatabase
}

View File

@ -0,0 +1,9 @@
import client from './client'
const getAvailablePrompts = (body) => client.post(`/prompts-list`, body)
const getPrompt = (body) => client.post(`/load-prompt`, body)
export default {
getAvailablePrompts,
getPrompt
}

File diff suppressed because one or more lines are too long

(image diff: new image asset added, 10 KiB)

View File

@ -1,7 +1,6 @@
import { useState, useRef, useEffect } from 'react'
import PropTypes from 'prop-types'
import { useSelector, useDispatch } from 'react-redux'
import { useNavigate } from 'react-router-dom'
import { useSelector } from 'react-redux'
// material-ui
import { useTheme } from '@mui/material/styles'
@ -26,16 +25,10 @@ import PerfectScrollbar from 'react-perfect-scrollbar'
// project imports
import MainCard from 'ui-component/cards/MainCard'
import Transitions from 'ui-component/extended/Transitions'
import { BackdropLoader } from 'ui-component/loading/BackdropLoader'
import AboutDialog from 'ui-component/dialog/AboutDialog'
// assets
import { IconLogout, IconSettings, IconFileExport, IconFileDownload, IconInfoCircle } from '@tabler/icons'
// API
import databaseApi from 'api/database'
import { SET_MENU } from 'store/actions'
import { IconLogout, IconSettings, IconInfoCircle } from '@tabler/icons'
import './index.css'
@ -43,17 +36,13 @@ import './index.css'
const ProfileSection = ({ username, handleLogout }) => {
const theme = useTheme()
const dispatch = useDispatch()
const navigate = useNavigate()
const customization = useSelector((state) => state.customization)
const [open, setOpen] = useState(false)
const [loading, setLoading] = useState(false)
const [aboutDialogOpen, setAboutDialogOpen] = useState(false)
const anchorRef = useRef(null)
const uploadRef = useRef(null)
const handleClose = (event) => {
if (anchorRef.current && anchorRef.current.contains(event.target)) {
@ -66,56 +55,6 @@ const ProfileSection = ({ username, handleLogout }) => {
setOpen((prevOpen) => !prevOpen)
}
const handleExportDB = async () => {
setOpen(false)
try {
const response = await databaseApi.getExportDatabase()
const exportItems = response.data
let dataStr = JSON.stringify(exportItems, null, 2)
let dataUri = 'data:application/json;charset=utf-8,' + encodeURIComponent(dataStr)
let exportFileDefaultName = `DB.json`
let linkElement = document.createElement('a')
linkElement.setAttribute('href', dataUri)
linkElement.setAttribute('download', exportFileDefaultName)
linkElement.click()
} catch (e) {
console.error(e)
}
}
const handleFileUpload = (e) => {
if (!e.target.files) return
const file = e.target.files[0]
const reader = new FileReader()
reader.onload = async (evt) => {
if (!evt?.target?.result) {
return
}
const { result } = evt.target
if (result.includes(`"chatmessages":[`) && result.includes(`"chatflows":[`) && result.includes(`"apikeys":[`)) {
dispatch({ type: SET_MENU, opened: false })
setLoading(true)
try {
await databaseApi.createLoadDatabase(JSON.parse(result))
setLoading(false)
navigate('/', { replace: true })
navigate(0)
} catch (e) {
console.error(e)
setLoading(false)
}
} else {
alert('Incorrect Flowise Database Format')
}
}
reader.readAsText(file)
}
const prevOpen = useRef(open)
useEffect(() => {
if (prevOpen.current === true && open === false) {
@ -196,27 +135,6 @@ const ProfileSection = ({ username, handleLogout }) => {
}
}}
>
<ListItemButton
sx={{ borderRadius: `${customization.borderRadius}px` }}
onClick={() => {
setOpen(false)
uploadRef.current.click()
}}
>
<ListItemIcon>
<IconFileDownload stroke={1.5} size='1.3rem' />
</ListItemIcon>
<ListItemText primary={<Typography variant='body2'>Load Database</Typography>} />
</ListItemButton>
<ListItemButton
sx={{ borderRadius: `${customization.borderRadius}px` }}
onClick={handleExportDB}
>
<ListItemIcon>
<IconFileExport stroke={1.5} size='1.3rem' />
</ListItemIcon>
<ListItemText primary={<Typography variant='body2'>Export Database</Typography>} />
</ListItemButton>
<ListItemButton
sx={{ borderRadius: `${customization.borderRadius}px` }}
onClick={() => {
@ -249,8 +167,6 @@ const ProfileSection = ({ username, handleLogout }) => {
</Transitions>
)}
</Popper>
<input ref={uploadRef} type='file' hidden accept='.json' onChange={(e) => handleFileUpload(e)} />
<BackdropLoader open={loading} />
<AboutDialog show={aboutDialogOpen} onCancel={() => setAboutDialogOpen(false)} />
</>
)

View File

@ -23,7 +23,6 @@ import useConfirm from 'hooks/useConfirm'
import { uiBaseURL } from '../../store/constant'
import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '../../store/actions'
import ConfirmDialog from '../dialog/ConfirmDialog'
import SaveChatflowDialog from '../dialog/SaveChatflowDialog'
import TagDialog from '../dialog/TagDialog'
@ -286,7 +285,6 @@ export default function FlowListMenu({ chatflow, updateFlowsApi }) {
Delete
</MenuItem>
</StyledMenu>
<ConfirmDialog />
<SaveChatflowDialog
show={flowDialogOpen}
dialogProps={{

View File

@ -0,0 +1,587 @@
import { createPortal } from 'react-dom'
import { useState, useEffect } from 'react'
import { useDispatch, useSelector } from 'react-redux'
import PropTypes from 'prop-types'
import rehypeMathjax from 'rehype-mathjax'
import rehypeRaw from 'rehype-raw'
import remarkGfm from 'remark-gfm'
import remarkMath from 'remark-math'
// MUI
import {
Box,
Button,
Card,
CardContent,
Dialog,
DialogActions,
DialogContent,
DialogTitle,
Chip,
Grid,
InputLabel,
List,
ListItemButton,
ListItemText,
OutlinedInput,
Select,
Typography,
Stack,
IconButton,
FormControl,
Checkbox,
MenuItem
} from '@mui/material'
import MuiAccordion from '@mui/material/Accordion'
import MuiAccordionSummary from '@mui/material/AccordionSummary'
import MuiAccordionDetails from '@mui/material/AccordionDetails'
import ExpandMoreIcon from '@mui/icons-material/ExpandMore'
import ArrowForwardIosSharpIcon from '@mui/icons-material/ArrowForwardIosSharp'
import ClearIcon from '@mui/icons-material/Clear'
import { styled } from '@mui/material/styles'
//Project Import
import { StyledButton } from 'ui-component/button/StyledButton'
import { MemoizedReactMarkdown } from 'ui-component/markdown/MemoizedReactMarkdown'
import { CodeBlock } from 'ui-component/markdown/CodeBlock'
import promptEmptySVG from 'assets/images/prompt_empty.svg'
import useApi from 'hooks/useApi'
import promptApi from 'api/prompt'
import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from 'store/actions'
const NewLineToBr = ({ children = '' }) => {
return children.split('\n').reduce(function (arr, line) {
return arr.concat(line, <br key={arr.length} />)
}, [])
}
const Accordion = styled((props) => <MuiAccordion disableGutters elevation={0} square {...props} />)(({ theme }) => ({
border: `1px solid ${theme.palette.divider}`,
'&:not(:last-child)': {
borderBottom: 0
},
'&:before': {
display: 'none'
}
}))
const AccordionSummary = styled((props) => (
<MuiAccordionSummary expandIcon={<ArrowForwardIosSharpIcon sx={{ fontSize: '0.9rem' }} />} {...props} />
))(({ theme }) => ({
backgroundColor: theme.palette.mode === 'dark' ? 'rgba(255, 255, 255, .05)' : 'rgba(0, 0, 0, .03)',
flexDirection: 'row-reverse',
'& .MuiAccordionSummary-expandIconWrapper.Mui-expanded': {
transform: 'rotate(180deg)'
},
'& .MuiAccordionSummary-content': {
marginLeft: theme.spacing(1)
}
}))
const AccordionDetails = styled(MuiAccordionDetails)(({ theme }) => ({
padding: theme.spacing(2),
borderTop: '1px solid rgba(0, 0, 0, .125)'
}))
const PromptLangsmithHubDialog = ({ promptType, show, onCancel, onSubmit }) => {
const portalElement = document.getElementById('portal')
const dispatch = useDispatch()
const customization = useSelector((state) => state.customization)
const getAvailablePromptsApi = useApi(promptApi.getAvailablePrompts)
useEffect(() => {
if (show) dispatch({ type: SHOW_CANVAS_DIALOG })
else dispatch({ type: HIDE_CANVAS_DIALOG })
return () => dispatch({ type: HIDE_CANVAS_DIALOG })
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [show, dispatch])
useEffect(() => {
if (promptType) {
getAvailablePromptsApi.request({ tags: promptType === 'template' ? 'StringPromptTemplate&' : 'ChatPromptTemplate&' })
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [promptType])
useEffect(() => {
if (getAvailablePromptsApi.data && getAvailablePromptsApi.data.repos) {
setAvailablePrompNameList(getAvailablePromptsApi.data.repos)
if (getAvailablePromptsApi.data.repos?.length) handleListItemClick(0, getAvailablePromptsApi.data.repos)
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [getAvailablePromptsApi.data])
const ITEM_HEIGHT = 48
const ITEM_PADDING_TOP = 8
const MenuProps = {
PaperProps: {
style: {
maxHeight: ITEM_HEIGHT * 4.5 + ITEM_PADDING_TOP,
width: 250
}
}
}
const models = [
{ id: 101, name: 'anthropic:claude-instant-1' },
{ id: 102, name: 'anthropic:claude-instant-1.2' },
{ id: 103, name: 'anthropic:claude-2' },
{ id: 104, name: 'google:palm-2-chat-bison' },
{ id: 105, name: 'google:palm-2-codechat-bison' },
{ id: 106, name: 'google:palm-2-text-bison' },
{ id: 107, name: 'meta:llama-2-13b-chat' },
{ id: 108, name: 'meta:llama-2-70b-chat' },
{ id: 109, name: 'openai:gpt-3.5-turbo' },
{ id: 110, name: 'openai:gpt-4' },
{ id: 111, name: 'openai:text-davinci-003' }
]
const [modelName, setModelName] = useState([])
const usecases = [
{ id: 201, name: 'Agents' },
{ id: 202, name: 'Agent Simulation' },
{ id: 203, name: 'Autonomous agents' },
{ id: 204, name: 'Classification' },
{ id: 205, name: 'Chatbots' },
{ id: 206, name: 'Code understanding' },
{ id: 207, name: 'Code writing' },
{ id: 208, name: 'Evaluation' },
{ id: 209, name: 'Extraction' },
{ id: 210, name: 'Interacting with APIs' },
{ id: 211, name: 'Multi-modal' },
{ id: 212, name: 'QA over documents' },
{ id: 213, name: 'Self-checking' },
{ id: 214, name: 'SQL' },
{ id: 215, name: 'Summarization' },
{ id: 216, name: 'Tagging' }
]
const [usecase, setUsecase] = useState([])
const languages = [
{ id: 301, name: 'Chinese' },
{ id: 302, name: 'English' },
{ id: 303, name: 'French' },
{ id: 304, name: 'German' },
{ id: 305, name: 'Russian' },
{ id: 306, name: 'Spanish' }
]
const [language, setLanguage] = useState([])
const [availablePrompNameList, setAvailablePrompNameList] = useState([])
const [selectedPrompt, setSelectedPrompt] = useState({})
const [accordionExpanded, setAccordionExpanded] = useState(['prompt'])
const handleAccordionChange = (accordionName) => (event, isExpanded) => {
const accordions = [...accordionExpanded]
if (!isExpanded) setAccordionExpanded(accordions.filter((accr) => accr !== accordionName))
else {
accordions.push(accordionName)
setAccordionExpanded(accordions)
}
}
const handleListItemClick = async (index, overridePromptNameList = []) => {
const prompt = overridePromptNameList.length ? overridePromptNameList[index] : availablePrompNameList[index]
if (!prompt.detailed) {
const createResp = await promptApi.getPrompt({
promptName: prompt.full_name
})
if (createResp.data) {
prompt.detailed = createResp.data.templates
}
}
setSelectedPrompt(prompt)
}
const fetchPrompts = async () => {
let tags = promptType === 'template' ? 'StringPromptTemplate&' : 'ChatPromptTemplate&'
modelName.forEach((item) => {
tags += `tags=${item.name}&`
})
usecase.forEach((item) => {
tags += `tags=${item.name}&`
})
language.forEach((item) => {
tags += `tags=${item.name}&`
})
getAvailablePromptsApi.request({ tags: tags })
}
const removeDuplicates = (value) => {
let duplicateRemoved = []
value.forEach((item) => {
if (value.filter((o) => o.id === item.id).length === 1) {
duplicateRemoved.push(item)
}
})
return duplicateRemoved
}
const handleModelChange = (event) => {
const {
target: { value }
} = event
setModelName(removeDuplicates(value))
}
const handleUsecaseChange = (event) => {
const {
target: { value }
} = event
setUsecase(removeDuplicates(value))
}
const handleLanguageChange = (event) => {
const {
target: { value }
} = event
setLanguage(removeDuplicates(value))
}
const component = show ? (
<Dialog
onClose={onCancel}
open={show}
fullWidth
maxWidth={'lg'}
aria-labelledby='prompt-dialog-title'
aria-describedby='prompt-dialog-description'
>
<DialogTitle sx={{ fontSize: '1rem' }} id='prompt-dialog-title'>
Langchain Hub ({promptType === 'template' ? 'PromptTemplate' : 'ChatPromptTemplate'})
</DialogTitle>
<DialogContent dividers sx={{ p: 1 }}>
<Box sx={{ display: 'flex', flexDirection: 'row', p: 2, pt: 1, alignItems: 'center' }}>
<FormControl sx={{ mr: 1, width: '30%' }}>
<InputLabel size='small' id='model-checkbox-label'>
Model
</InputLabel>
<Select
id='model-checkbox'
labelId='model-checkbox-label'
multiple
size='small'
value={modelName}
onChange={handleModelChange}
input={<OutlinedInput label='Model' />}
renderValue={(selected) => selected.map((x) => x.name).join(', ')}
endAdornment={
modelName.length ? (
<IconButton sx={{ mr: 2 }} onClick={() => setModelName([])}>
<ClearIcon style={{ width: 20, height: 20 }} />
</IconButton>
) : (
false
)
}
sx={{
'.MuiSvgIcon-root ': {
fill: customization.isDarkMode ? 'white !important' : ''
}
}}
MenuProps={MenuProps}
>
{models.map((variant) => (
<MenuItem key={variant.id} value={variant}>
<Checkbox id={variant.id} checked={modelName.findIndex((item) => item.id === variant.id) >= 0} />
<ListItemText primary={variant.name} />
</MenuItem>
))}
</Select>
</FormControl>
<FormControl sx={{ mr: 1, width: '30%' }}>
<InputLabel size='small' id='usecase-checkbox-label'>
Usecase
</InputLabel>
<Select
autoWidth={false}
labelId='usecase-checkbox-label'
id='usecase-checkbox'
multiple
size='small'
value={usecase}
onChange={handleUsecaseChange}
input={<OutlinedInput label='Usecase' />}
renderValue={(selected) => selected.map((x) => x.name).join(', ')}
endAdornment={
usecase.length ? (
<IconButton sx={{ mr: 2 }} onClick={() => setUsecase([])}>
<ClearIcon style={{ width: 20, height: 20 }} />
</IconButton>
) : (
false
)
}
sx={{
'.MuiSvgIcon-root ': {
fill: customization.isDarkMode ? 'white !important' : ''
}
}}
MenuProps={MenuProps}
>
{usecases.map((variant) => (
<MenuItem key={variant.id} value={variant}>
<Checkbox id={variant.id} checked={usecase.findIndex((item) => item.id === variant.id) >= 0} />
<ListItemText primary={variant.name} />
</MenuItem>
))}
</Select>
</FormControl>
<FormControl sx={{ mr: 1, width: '30%' }}>
<InputLabel size='small' id='language-checkbox-label'>
Language
</InputLabel>
<Select
labelId='language-checkbox-label'
id='language-checkbox'
multiple
size='small'
value={language}
onChange={handleLanguageChange}
input={<OutlinedInput label='Language' />}
renderValue={(selected) => selected.map((x) => x.name).join(', ')}
endAdornment={
language.length ? (
<IconButton sx={{ mr: 2 }} onClick={() => setLanguage([])}>
<ClearIcon style={{ width: 20, height: 20 }} />
</IconButton>
) : (
false
)
}
sx={{
'.MuiSvgIcon-root ': {
fill: customization.isDarkMode ? 'white !important' : ''
}
}}
MenuProps={MenuProps}
>
{languages.map((variant) => (
<MenuItem key={variant.id} value={variant}>
<Checkbox id={variant.id} checked={language.findIndex((item) => item.id === variant.id) >= 0} />
<ListItemText primary={variant.name} />
</MenuItem>
))}
</Select>
</FormControl>
<FormControl sx={{ width: '10%' }}>
<Button disableElevation variant='outlined' onClick={fetchPrompts}>
Search
</Button>
</FormControl>
</Box>
{availablePrompNameList && availablePrompNameList.length === 0 && (
<Stack sx={{ alignItems: 'center', justifyContent: 'center', width: '100%', pb: 3 }} flexDirection='column'>
<Box sx={{ p: 5, height: 'auto' }}>
<img style={{ objectFit: 'cover', height: '20vh', width: 'auto' }} src={promptEmptySVG} alt='promptEmptySVG' />
</Box>
<div>No Available Prompts</div>
</Stack>
)}
{availablePrompNameList && availablePrompNameList.length > 0 && (
<Stack sx={{ alignItems: 'center', justifyContent: 'center', width: '100%' }} flexDirection='column'>
<Box sx={{ width: '100%', p: 2 }}>
<Grid xs={12} container spacing={1} justifyContent='center' alignItems='center'>
<Grid xs={4} item sx={{ textAlign: 'left' }}>
<Box sx={{ width: '100%', maxWidth: 360 }}>
<Card variant='outlined' sx={{ height: 470, overflow: 'auto', borderRadius: 0 }}>
<CardContent sx={{ p: 1 }}>
<Typography sx={{ fontSize: 10 }} color='text.secondary' gutterBottom>
Available Prompts
</Typography>
<List component='nav' aria-label='secondary mailbox folder'>
{availablePrompNameList.map((item, index) => (
<ListItemButton
key={item.id}
selected={item.id === selectedPrompt?.id}
onClick={() => handleListItemClick(index)}
>
<div style={{ display: 'flex', flexDirection: 'column' }}>
<Typography sx={{ fontSize: 16, p: 1, fontWeight: 500 }}>
{item.full_name}
</Typography>
<div
style={{
display: 'flex',
flexDirection: 'row',
flexWrap: 'wrap',
marginTop: 5
}}
>
{item.tags.map((tag, index) => (
<Chip
key={index}
label={tag}
style={{ marginRight: 5, marginBottom: 5 }}
/>
))}
</div>
</div>
</ListItemButton>
))}
</List>
</CardContent>
</Card>
</Box>
</Grid>
<Grid xs={8} item sx={{ textAlign: 'left' }}>
<Box sx={{ display: 'flex', flexDirection: 'column' }}>
<Card sx={{ height: 470, overflow: 'auto' }}>
<CardContent sx={{ p: 0.5 }}>
<Accordion
expanded={accordionExpanded.includes('prompt')}
onChange={handleAccordionChange('prompt')}
>
<AccordionSummary
aria-controls='panel2d-content'
expandIcon={<ExpandMoreIcon />}
id='panel2d-header'
>
<Typography>Prompt</Typography>
</AccordionSummary>
<AccordionDetails>
<Typography sx={{ wordWrap: 'true' }} color='text.primary'>
{selectedPrompt?.detailed?.map((item) => (
<>
<Typography sx={{ fontSize: 12 }} color='text.secondary' gutterBottom>
{item.typeDisplay.toUpperCase()}
</Typography>
<Typography>
<p
style={{
whiteSpace: 'pre-wrap',
wordWrap: 'break-word',
fontFamily: 'inherit',
wordSpacing: '0.1rem',
lineHeight: '1.5rem'
}}
>
<NewLineToBr>{item.template}</NewLineToBr>
</p>
</Typography>
</>
))}
</Typography>
</AccordionDetails>
</Accordion>
<Accordion
expanded={accordionExpanded.includes('description')}
onChange={handleAccordionChange('description')}
>
<AccordionSummary
aria-controls='panel1d-content'
expandIcon={<ExpandMoreIcon />}
id='panel1d-header'
>
<Typography>Description</Typography>
</AccordionSummary>
<AccordionDetails>
<Typography
sx={{ wordWrap: 'true', wordSpacing: '0.1rem', lineHeight: '1.5rem' }}
color='text.primary'
>
{selectedPrompt?.description}
</Typography>
</AccordionDetails>
</Accordion>
<Accordion
expanded={accordionExpanded.includes('readme')}
onChange={handleAccordionChange('readme')}
>
<AccordionSummary
expandIcon={<ExpandMoreIcon />}
aria-controls='panel3d-content'
id='panel3d-header'
>
<Typography>Readme</Typography>
</AccordionSummary>
<AccordionDetails>
<div
style={{
lineHeight: 1.75,
'& a': {
display: 'block',
marginRight: '2.5rem',
wordWrap: 'break-word',
color: '#16bed7',
fontWeight: 500
},
'& a:hover': { opacity: 0.8 },
'& code': {
color: '#0ab126',
fontWeight: 500,
whiteSpace: 'pre-wrap !important'
}
}}
>
<MemoizedReactMarkdown
remarkPlugins={[remarkGfm, remarkMath]}
rehypePlugins={[rehypeMathjax, rehypeRaw]}
components={{
code({ inline, className, children, ...props }) {
const match = /language-(\w+)/.exec(className || '')
return !inline ? (
<CodeBlock
key={Math.random()}
isDialog={true}
language={(match && match[1]) || ''}
value={String(children).replace(/\n$/, '')}
{...props}
/>
) : (
<code className={className} {...props}>
{children}
</code>
)
}
}}
>
{selectedPrompt?.readme}
</MemoizedReactMarkdown>
</div>
</AccordionDetails>
</Accordion>
</CardContent>
</Card>
</Box>
</Grid>
</Grid>
</Box>
</Stack>
)}
</DialogContent>
{availablePrompNameList && availablePrompNameList.length > 0 && (
<DialogActions>
<Button onClick={onCancel}>Cancel</Button>
<StyledButton
disabled={!selectedPrompt?.detailed}
onClick={() => onSubmit(selectedPrompt.detailed)}
variant='contained'
>
Load
</StyledButton>
</DialogActions>
)}
</Dialog>
) : null
return createPortal(component, portalElement)
}
PromptLangsmithHubDialog.propTypes = {
promptType: PropTypes.string,
show: PropTypes.bool,
onCancel: PropTypes.func,
onSubmit: PropTypes.func
}
export default PromptLangsmithHubDialog

View File

@ -69,7 +69,9 @@ export const FlowListTable = ({ data, images, filterFunction, updateFlowsApi })
<Typography
sx={{ fontSize: '1.2rem', fontWeight: 500, overflowWrap: 'break-word', whiteSpace: 'pre-line' }}
>
<Button onClick={() => goToCanvas(row)}>{row.templateName || row.name}</Button>
<Button onClick={() => goToCanvas(row)} sx={{ textAlign: 'left' }}>
{row.templateName || row.name}
</Button>
</Typography>
</TableCell>
<TableCell key='1'>
@ -145,8 +147,8 @@ export const FlowListTable = ({ data, images, filterFunction, updateFlowsApi })
}
FlowListTable.propTypes = {
data: PropTypes.object,
images: PropTypes.array,
data: PropTypes.array,
images: PropTypes.object,
filterFunction: PropTypes.func,
updateFlowsApi: PropTypes.object
}

View File

@ -68,10 +68,14 @@ const AddNodes = ({ nodesData, node }) => {
else newNodes.push(vsNode)
}
delete obj['Vector Stores']
obj['Vector Stores;DEPRECATING'] = deprecatingNodes
accordianCategories['Vector Stores;DEPRECATING'] = isFilter ? true : false
obj['Vector Stores;NEW'] = newNodes
accordianCategories['Vector Stores;NEW'] = isFilter ? true : false
if (deprecatingNodes.length) {
obj['Vector Stores;DEPRECATING'] = deprecatingNodes
accordianCategories['Vector Stores;DEPRECATING'] = isFilter ? true : false
}
if (newNodes.length) {
obj['Vector Stores;NEW'] = newNodes
accordianCategories['Vector Stores;NEW'] = isFilter ? true : false
}
setNodes(obj)
}

View File

@ -6,6 +6,7 @@ import { useSelector } from 'react-redux'
// material-ui
import { useTheme, styled } from '@mui/material/styles'
import { Box, Typography, Tooltip, IconButton, Button } from '@mui/material'
import IconAutoFixHigh from '@mui/icons-material/AutoFixHigh'
import { tooltipClasses } from '@mui/material/Tooltip'
import { IconArrowsMaximize, IconEdit, IconAlertTriangle } from '@tabler/icons'
@ -31,6 +32,7 @@ import { getInputVariables } from 'utils/genericHelper'
// const
import { FLOWISE_CREDENTIAL_ID } from 'store/constant'
import PromptLangsmithHubDialog from '../../ui-component/dialog/PromptLangsmithHubDialog'
const EDITABLE_OPTIONS = ['selectedTool', 'selectedAssistant']
@ -56,6 +58,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA
const [reloadTimestamp, setReloadTimestamp] = useState(Date.now().toString())
const [showFormatPromptValuesDialog, setShowFormatPromptValuesDialog] = useState(false)
const [formatPromptValuesDialogProps, setFormatPromptValuesDialogProps] = useState({})
const [showPromptHubDialog, setShowPromptHubDialog] = useState(false)
const onExpandDialogClicked = (value, inputParam) => {
const dialogProp = {
@ -69,6 +72,17 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA
setShowExpandDialog(true)
}
const onShowPromptHubButtonClicked = () => {
setShowPromptHubDialog(true)
}
const onShowPromptHubButtonSubmit = (templates) => {
setShowPromptHubDialog(false)
for (const t of templates) {
if (Object.prototype.hasOwnProperty.call(data.inputs, t.type)) {
data.inputs[t.type] = t.template
}
}
}
const onFormatPromptValuesClicked = (value, inputParam) => {
// Preset values if the field is format prompt values
let inputValue = value
@ -209,6 +223,31 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA
</CustomWidthTooltip>
)}
<Box sx={{ p: 2 }}>
{(data.name === 'promptTemplate' || data.name === 'chatPromptTemplate') &&
(inputParam.name === 'template' || inputParam.name === 'systemMessagePrompt') && (
<>
<Button
style={{
display: 'flex',
flexDirection: 'row',
width: '100%'
}}
disabled={disabled}
sx={{ borderRadius: 25, width: '100%', mb: 2, mt: 0 }}
variant='outlined'
onClick={() => onShowPromptHubButtonClicked()}
endIcon={<IconAutoFixHigh />}
>
Langchain Hub
</Button>
<PromptLangsmithHubDialog
promptType={inputParam.name}
show={showPromptHubDialog}
onCancel={() => setShowPromptHubDialog(false)}
onSubmit={onShowPromptHubButtonSubmit}
></PromptLangsmithHubDialog>
</>
)}
<div style={{ display: 'flex', flexDirection: 'row' }}>
<Typography>
{inputParam.label}
@ -260,6 +299,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA
}}
/>
)}
{inputParam.type === 'file' && (
<File
disabled={disabled}

View File

@ -170,7 +170,7 @@ const Canvas = () => {
try {
await chatflowsApi.deleteChatflow(chatflow.id)
localStorage.removeItem(`${chatflow.id}_INTERNAL`)
navigate(-1)
navigate('/')
} catch (error) {
const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}`
enqueueSnackbar({

View File

@ -12,6 +12,7 @@ import ItemCard from 'ui-component/cards/ItemCard'
import { gridSpacing } from 'store/constant'
import WorkflowEmptySVG from 'assets/images/workflow_empty.svg'
import LoginDialog from 'ui-component/dialog/LoginDialog'
import ConfirmDialog from 'ui-component/dialog/ConfirmDialog'
// API
import chatflowsApi from 'api/chatflows'
@ -160,7 +161,6 @@ const Chatflows = () => {
variant='contained'
value='card'
title='Card View'
selectedColor='#00abc0'
>
<IconLayoutGrid />
</ToggleButton>
@ -212,6 +212,7 @@ const Chatflows = () => {
</Stack>
)}
<LoginDialog show={loginDialogOpen} dialogProps={loginDialogProps} onConfirm={onLoginClick} />
<ConfirmDialog />
</MainCard>
)
}