- update LangchainJS version

- add types & interfaces

- small UI bug fix

- update marketplace files
Henry 2023-04-13 18:32:00 +01:00
parent 02d8284f58
commit 9281b57ef1
47 changed files with 1178 additions and 1141 deletions

View File

@ -17,7 +17,7 @@ jobs:
strategy:
matrix:
platform: [ubuntu-latest]
node-version: [14.x, 16.x]
node-version: [18.15.0]
runs-on: ${{ matrix.platform }}
steps:

View File

@ -1,4 +1,9 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutor, AgentExecutor } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { BaseChatModel } from 'langchain/chat_models/base'
import { BaseChatMemory } from 'langchain/memory'
import { getBaseClasses } from '../../../src/utils'
class ConversationalAgent_Agents implements INode {
label: string
@ -17,6 +22,7 @@ class ConversationalAgent_Agents implements INode {
this.category = 'Agents'
this.icon = 'agent.svg'
this.description = 'Conversational agent for a chat model. It will utilize chat specific prompts'
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
this.inputs = [
{
label: 'Allowed Tools',
@ -37,16 +43,10 @@ class ConversationalAgent_Agents implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['AgentExecutor']
}
async init(nodeData: INodeData): Promise<any> {
const { initializeAgentExecutor } = await import('langchain/agents')
const model = nodeData.inputs?.model
const tools = nodeData.inputs?.tools
const memory = nodeData.inputs?.memory
const model = nodeData.inputs?.model as BaseChatModel
const tools = nodeData.inputs?.tools as Tool[]
const memory = nodeData.inputs?.memory as BaseChatMemory
const executor = await initializeAgentExecutor(tools, model, 'chat-conversational-react-description', true)
executor.memory = memory
@ -54,7 +54,7 @@ class ConversationalAgent_Agents implements INode {
}
async run(nodeData: INodeData, input: string): Promise<string> {
const executor = nodeData.instance
const executor = nodeData.instance as AgentExecutor
const result = await executor.call({ input })
return result?.output
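
The recurring this.baseClasses = [this.type, ...getBaseClasses(X)] line replaces the per-node async getBaseClasses() methods deleted throughout this commit. The helper lives in src/utils and its source is not part of this diff; a minimal sketch consistent with the call sites, assuming it simply walks a class's prototype chain and collects ancestor class names, could look like:

// Hypothetical sketch of getBaseClasses (the real helper is in src/utils,
// outside this diff): start at the parent class, since each node prepends
// its own type, and collect ancestor class names up the prototype chain.
const getBaseClasses = (targetClass: any): string[] => {
    const names: string[] = []
    let current = Object.getPrototypeOf(targetClass)
    while (current && current.name && current.name !== 'Object') {
        names.push(current.name)
        current = Object.getPrototypeOf(current)
    }
    return names
}
// Under this assumption, [this.type, ...getBaseClasses(AgentExecutor)]
// yields ['AgentExecutor', 'BaseChain'], matching the baseClasses arrays
// in the updated marketplace files further down.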

View File

@ -1,6 +1,10 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutor, AgentExecutor } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { BaseChatModel } from 'langchain/chat_models/base'
import { getBaseClasses } from '../../../src/utils'
class MRLKAgentChat_Agents implements INode {
class MRKLAgentChat_Agents implements INode {
label: string
name: string
description: string
@ -11,12 +15,13 @@ class MRLKAgentChat_Agents implements INode {
inputs: INodeParams[]
constructor() {
this.label = 'MRLK Agent for Chat Models'
this.name = 'mrlkAgentChat'
this.label = 'MRKL Agent for Chat Models'
this.name = 'mrklAgentChat'
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'agent.svg'
this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with Chat Models'
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
this.inputs = [
{
label: 'Allowed Tools',
@ -32,27 +37,20 @@ class MRLKAgentChat_Agents implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['AgentExecutor']
}
async init(nodeData: INodeData): Promise<any> {
const { initializeAgentExecutor } = await import('langchain/agents')
const model = nodeData.inputs?.model
const tools = nodeData.inputs?.tools
const model = nodeData.inputs?.model as BaseChatModel
const tools = nodeData.inputs?.tools as Tool[]
const executor = await initializeAgentExecutor(tools, model, 'chat-zero-shot-react-description', true)
return executor
}
async run(nodeData: INodeData, input: string): Promise<string> {
const executor = nodeData.instance
const executor = nodeData.instance as AgentExecutor
const result = await executor.call({ input })
return result?.output
}
}
module.exports = { nodeClass: MRLKAgentChat_Agents }
module.exports = { nodeClass: MRKLAgentChat_Agents }

View File

(image changed: 650 B before, 650 B after)

View File

@ -1,6 +1,10 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutor, AgentExecutor } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { BaseLLM } from 'langchain/llms/base'
import { getBaseClasses } from '../../../src/utils'
class MRLKAgentLLM_Agents implements INode {
class MRKLAgentLLM_Agents implements INode {
label: string
name: string
description: string
@ -11,12 +15,13 @@ class MRLKAgentLLM_Agents implements INode {
inputs: INodeParams[]
constructor() {
this.label = 'MRLK Agent for LLMs'
this.name = 'mrlkAgentLLM'
this.label = 'MRKL Agent for LLMs'
this.name = 'mrklAgentLLM'
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'agent.svg'
this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs'
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
this.inputs = [
{
label: 'Allowed Tools',
@ -27,32 +32,25 @@ class MRLKAgentLLM_Agents implements INode {
{
label: 'LLM Model',
name: 'model',
type: 'BaseLanguageModel'
type: 'BaseLLM'
}
]
}
async getBaseClasses(): Promise<string[]> {
return ['AgentExecutor']
}
async init(nodeData: INodeData): Promise<any> {
const { initializeAgentExecutor } = await import('langchain/agents')
const model = nodeData.inputs?.model
const tools = nodeData.inputs?.tools
const model = nodeData.inputs?.model as BaseLLM
const tools = nodeData.inputs?.tools as Tool[]
const executor = await initializeAgentExecutor(tools, model, 'zero-shot-react-description', true)
return executor
}
async run(nodeData: INodeData, input: string): Promise<string> {
const executor = nodeData.instance
const executor = nodeData.instance as AgentExecutor
const result = await executor.call({ input })
return result?.output
}
}
module.exports = { nodeClass: MRLKAgentLLM_Agents }
module.exports = { nodeClass: MRKLAgentLLM_Agents }

View File

(image changed: 650 B before, 650 B after)

View File

@ -1,5 +1,8 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ConversationalRetrievalQAChain } from 'langchain/chains'
import { BaseLLM } from 'langchain/llms/base'
import { BaseRetriever } from 'langchain/schema'
class ConversationalRetrievalQAChain_Chains implements INode {
label: string
@ -18,11 +21,12 @@ class ConversationalRetrievalQAChain_Chains implements INode {
this.icon = 'chain.svg'
this.category = 'Chains'
this.description = 'Document QA - built on RetrievalQAChain to provide a chat history component'
this.baseClasses = [this.type, ...getBaseClasses(ConversationalRetrievalQAChain)]
this.inputs = [
{
label: 'LLM',
name: 'llm',
type: 'BaseLanguageModel'
type: 'BaseLLM'
},
{
label: 'Vector Store Retriever',
@ -32,23 +36,16 @@ class ConversationalRetrievalQAChain_Chains implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { ConversationalRetrievalQAChain } = await import('langchain/chains')
return getBaseClasses(ConversationalRetrievalQAChain)
}
async init(nodeData: INodeData): Promise<any> {
const { ConversationalRetrievalQAChain } = await import('langchain/chains')
const llm = nodeData.inputs?.llm
const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever
const llm = nodeData.inputs?.llm as BaseLLM
const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever
const chain = ConversationalRetrievalQAChain.fromLLM(llm, vectorStoreRetriever)
return chain
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const chain = nodeData.instance
const chain = nodeData.instance as ConversationalRetrievalQAChain
let chatHistory = ''
if (options && options.chatHistory) {

View File

@ -1,5 +1,8 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { BasePromptTemplate } from 'langchain/prompts'
class LLMChain_Chains implements INode {
label: string
@ -18,10 +21,11 @@ class LLMChain_Chains implements INode {
this.icon = 'chain.svg'
this.category = 'Chains'
this.description = 'Chain to run queries against LLMs'
this.baseClasses = [this.type, ...getBaseClasses(LLMChain)]
this.inputs = [
{
label: 'LLM',
name: 'llm',
label: 'Language Model',
name: 'model',
type: 'BaseLanguageModel'
},
{
@ -43,24 +47,17 @@ class LLMChain_Chains implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { LLMChain } = await import('langchain/chains')
return getBaseClasses(LLMChain)
}
async init(nodeData: INodeData): Promise<any> {
const { LLMChain } = await import('langchain/chains')
const model = nodeData.inputs?.model as BaseLanguageModel
const prompt = nodeData.inputs?.prompt as BasePromptTemplate
const llm = nodeData.inputs?.llm
const prompt = nodeData.inputs?.prompt
const chain = new LLMChain({ llm, prompt })
const chain = new LLMChain({ llm: model, prompt })
return chain
}
async run(nodeData: INodeData, input: string): Promise<string> {
const inputVariables = nodeData.instance.prompt.inputVariables // ["product"]
const chain = nodeData.instance
const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"]
const chain = nodeData.instance as LLMChain
if (inputVariables.length === 1) {
const res = await chain.run(input)
@ -71,7 +68,7 @@ class LLMChain_Chains implements INode {
const promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))
let seen = []
let seen: string[] = []
for (const variable of inputVariables) {
seen.push(variable)
@ -81,13 +78,17 @@ class LLMChain_Chains implements INode {
}
if (seen.length === 1) {
const lastValue = seen.pop()
if (!lastValue) throw new Error('Please provide Prompt Values')
const options = {
...promptValues,
[seen.pop()]: input
[lastValue]: input
}
const res = await chain.call(options)
return res?.text
} else throw new Error('Please provide Prompt Values')
} else {
throw new Error('Please provide Prompt Values')
}
} else {
const res = await chain.run(input)
return res
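
The new lastValue guard narrows the result of seen.pop() from string | undefined to string before it is used as a computed property key. A self-contained sketch with assumed example values (the step that removes variables already covered by promptValues is elided by the hunk above, so it is reconstructed here as an assumption):

// Assumed example values: inputVariables comes from the prompt template,
// promptValues supplies all but one of them, and the remaining variable
// is bound to the raw user input.
const inputVariables = ['product', 'language']
const promptValues: Record<string, string> = { language: 'French' }
const input = 'colorful socks'

const seen: string[] = []
for (const variable of inputVariables) {
    // assumed filtering step: keep only variables not set in promptValues
    if (!(variable in promptValues)) seen.push(variable)
}

if (seen.length === 1) {
    const lastValue = seen.pop()
    if (!lastValue) throw new Error('Please provide Prompt Values')
    const options = { ...promptValues, [lastValue]: input }
    // options -> { language: 'French', product: 'colorful socks' }
}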

View File

@ -1,4 +1,8 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { RetrievalQAChain } from 'langchain/chains'
import { BaseLLM } from 'langchain/llms/base'
import { BaseRetriever } from 'langchain/schema'
import { getBaseClasses } from '../../../src/utils'
class RetrievalQAChain_Chains implements INode {
label: string
@ -17,11 +21,12 @@ class RetrievalQAChain_Chains implements INode {
this.icon = 'chain.svg'
this.category = 'Chains'
this.description = 'QA chain to answer a question based on the retrieved documents'
this.baseClasses = [this.type, ...getBaseClasses(RetrievalQAChain)]
this.inputs = [
{
label: 'LLM',
name: 'llm',
type: 'BaseLanguageModel'
type: 'BaseLLM'
},
{
label: 'Vector Store Retriever',
@ -31,21 +36,16 @@ class RetrievalQAChain_Chains implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['BaseChain']
}
async init(nodeData: INodeData): Promise<any> {
const { RetrievalQAChain } = await import('langchain/chains')
const llm = nodeData.inputs?.llm
const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever
const llm = nodeData.inputs?.llm as BaseLLM
const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever
const chain = RetrievalQAChain.fromLLM(llm, vectorStoreRetriever)
return chain
}
async run(nodeData: INodeData, input: string): Promise<string> {
const chain = nodeData.instance
const chain = nodeData.instance as RetrievalQAChain
const obj = {
query: input
}
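
The hunk ends before run() completes; presumably the chain is then invoked with that query object. A sketch of the assumed continuation, using the standard { text } response shape of a RetrievalQAChain:

// Assumed continuation of run(), not shown in this hunk:
const res = await chain.call(obj) // obj = { query: input }
return res?.text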

View File

@ -1,5 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ChatOpenAI } from 'langchain/chat_models/openai'
class ChatOpenAI_ChatModels implements INode {
label: string
@ -18,6 +19,7 @@ class ChatOpenAI_ChatModels implements INode {
this.icon = 'openai.png'
this.category = 'Chat Models'
this.description = 'Wrapper around OpenAI large language models that use the Chat endpoint'
this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
this.inputs = [
{
label: 'OpenAI Api Key',
@ -29,6 +31,18 @@ class ChatOpenAI_ChatModels implements INode {
name: 'modelName',
type: 'options',
options: [
{
label: 'gpt-4',
name: 'gpt-4'
},
{
label: 'gpt-4-0314',
name: 'gpt-4-0314'
},
{
label: 'gpt-4-32k-0314',
name: 'gpt-4-32k-0314'
},
{
label: 'gpt-3.5-turbo',
name: 'gpt-3.5-turbo'
@ -51,14 +65,7 @@ class ChatOpenAI_ChatModels implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { ChatOpenAI } = await import('langchain/chat_models')
return getBaseClasses(ChatOpenAI)
}
async init(nodeData: INodeData): Promise<any> {
const { ChatOpenAI } = await import('langchain/chat_models')
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const openAIApiKey = nodeData.inputs?.openAIApiKey as string
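
With ChatOpenAI imported statically, init() can construct the model directly from these string inputs. A sketch using one of the newly added model options (the key is a placeholder):

import { ChatOpenAI } from 'langchain/chat_models/openai'

const temperature = '0.9' // node inputs arrive as strings
const model = new ChatOpenAI({
    temperature: parseFloat(temperature),
    modelName: 'gpt-4', // one of the options added above
    openAIApiKey: 'sk-...' // placeholder
})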

View File

@ -1,4 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { GithubRepoLoader, GithubRepoLoaderParams } from 'langchain/document_loaders/web/github'
class Github_DocumentLoaders implements INode {
label: string
@ -13,10 +15,11 @@ class Github_DocumentLoaders implements INode {
constructor() {
this.label = 'Github'
this.name = 'github'
this.type = 'Github'
this.type = 'Document'
this.icon = 'github.png'
this.category = 'Document Loaders'
this.description = `Load data from a GitHub repository`
this.baseClasses = [this.type]
this.inputs = [
{
label: 'Repo Link',
@ -46,23 +49,17 @@ class Github_DocumentLoaders implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['Document']
}
async init(nodeData: INodeData): Promise<any> {
const { GithubRepoLoader } = await import('langchain/document_loaders')
const repoLink = nodeData.inputs?.repoLink as string
const branch = nodeData.inputs?.branch as string
const accessToken = nodeData.inputs?.accessToken as string
const textSplitter = nodeData.inputs?.textSplitter
const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
const options = {
const options: GithubRepoLoaderParams = {
branch,
recursive: false,
unknown: 'warn'
} as any
}
if (accessToken) options.accessToken = accessToken
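
Typing options as GithubRepoLoaderParams, instead of the old cast to any, lets the compiler check the loader config. A usage sketch with a placeholder repository, run inside an async context:

import { GithubRepoLoader, GithubRepoLoaderParams } from 'langchain/document_loaders/web/github'

const options: GithubRepoLoaderParams = {
    branch: 'main',
    recursive: false,
    unknown: 'warn' // log a warning on unrecognized repo entries
}
const loader = new GithubRepoLoader('https://github.com/FlowiseAI/Flowise', options)
const docs = await loader.load() // one Document per loaded file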

View File

@ -1,4 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { PDFLoader } from 'langchain/document_loaders/fs/pdf'
class Pdf_DocumentLoaders implements INode {
label: string
@ -13,10 +15,11 @@ class Pdf_DocumentLoaders implements INode {
constructor() {
this.label = 'Pdf File'
this.name = 'pdfFile'
this.type = 'PDF'
this.type = 'Document'
this.icon = 'pdf.svg'
this.category = 'Document Loaders'
this.description = `Load data from PDF files`
this.baseClasses = [this.type]
this.inputs = [
{
label: 'Pdf File',
@ -49,14 +52,8 @@ class Pdf_DocumentLoaders implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['Document']
}
async init(nodeData: INodeData): Promise<any> {
const { PDFLoader } = await import('langchain/document_loaders')
const textSplitter = nodeData.inputs?.textSplitter
const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
const pdfFileBase64 = nodeData.inputs?.pdfFile as string
const usage = nodeData.inputs?.usage as string

View File

@ -1,4 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { TextLoader } from 'langchain/document_loaders/fs/text'
class Text_DocumentLoaders implements INode {
label: string
@ -13,10 +15,11 @@ class Text_DocumentLoaders implements INode {
constructor() {
this.label = 'Text File'
this.name = 'textFile'
this.type = 'Text'
this.type = 'Document'
this.icon = 'textFile.svg'
this.category = 'Document Loaders'
this.description = `Load data from text files`
this.baseClasses = [this.type]
this.inputs = [
{
label: 'Txt File',
@ -33,13 +36,8 @@ class Text_DocumentLoaders implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['Document']
}
async init(nodeData: INodeData): Promise<any> {
const { TextLoader } = await import('langchain/document_loaders')
const textSplitter = nodeData.inputs?.textSplitter
const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
const txtFileBase64 = nodeData.inputs?.txtFile as string
const splitDataURI = txtFileBase64.split(',')
splitDataURI.pop()

View File

@ -1,5 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
class OpenAIEmbedding_Embeddings implements INode {
label: string
@ -18,6 +19,7 @@ class OpenAIEmbedding_Embeddings implements INode {
this.icon = 'openai.png'
this.category = 'Embeddings'
this.description = 'OpenAI API to generate embeddings for a given text'
this.baseClasses = [this.type, ...getBaseClasses(OpenAIEmbeddings)]
this.inputs = [
{
label: 'OpenAI Api Key',
@ -27,13 +29,7 @@ class OpenAIEmbedding_Embeddings implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { OpenAIEmbeddings } = await import('langchain/embeddings')
return getBaseClasses(OpenAIEmbeddings)
}
async init(nodeData: INodeData): Promise<any> {
const { OpenAIEmbeddings } = await import('langchain/embeddings')
const openAIApiKey = nodeData.inputs?.openAIApiKey as string
const model = new OpenAIEmbeddings({ openAIApiKey })

View File

@ -1,5 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { HuggingFaceInference } from 'langchain/llms/hf'
class HuggingFaceInference_LLMs implements INode {
label: string
@ -17,7 +18,8 @@ class HuggingFaceInference_LLMs implements INode {
this.type = 'HuggingFaceInference'
this.icon = 'huggingface.png'
this.category = 'LLMs'
this.description = 'Wrapper around OpenAI large language models'
this.description = 'Wrapper around HuggingFace large language models'
this.baseClasses = [this.type, ...getBaseClasses(HuggingFaceInference)]
this.inputs = [
{
label: 'Model',
@ -42,14 +44,7 @@ class HuggingFaceInference_LLMs implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { HuggingFaceInference } = await import('langchain/llms')
return getBaseClasses(HuggingFaceInference)
}
async init(nodeData: INodeData): Promise<any> {
const { HuggingFaceInference } = await import('langchain/llms')
const temperature = nodeData.inputs?.temperature as string
const model = nodeData.inputs?.model as string

View File

@ -1,5 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { OpenAI } from 'langchain/llms/openai'
class OpenAI_LLMs implements INode {
label: string
@ -18,6 +19,7 @@ class OpenAI_LLMs implements INode {
this.icon = 'openai.png'
this.category = 'LLMs'
this.description = 'Wrapper around OpenAI large language models'
this.baseClasses = [this.type, ...getBaseClasses(OpenAI)]
this.inputs = [
{
label: 'OpenAI Api Key',
@ -59,14 +61,7 @@ class OpenAI_LLMs implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { OpenAI } = await import('langchain/llms')
return getBaseClasses(OpenAI)
}
async init(nodeData: INodeData): Promise<any> {
const { OpenAI } = await import('langchain/llms')
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const openAIApiKey = nodeData.inputs?.openAIApiKey as string

View File

@ -1,5 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { BufferMemory } from 'langchain/memory'
class BufferMemory_Memory implements INode {
label: string
@ -17,7 +18,8 @@ class BufferMemory_Memory implements INode {
this.type = 'BufferMemory'
this.icon = 'memory.svg'
this.category = 'Memory'
this.description = 'Perform calculations on response'
this.description = 'Remembers previous conversational back and forths directly'
this.baseClasses = [this.type, ...getBaseClasses(BufferMemory)]
this.inputs = [
{
label: 'Memory Key',
@ -34,13 +36,7 @@ class BufferMemory_Memory implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { BufferMemory } = await import('langchain/memory')
return getBaseClasses(BufferMemory)
}
async init(nodeData: INodeData): Promise<any> {
const { BufferMemory } = await import('langchain/memory')
const memoryKey = nodeData.inputs?.memoryKey as string
const inputKey = nodeData.inputs?.inputKey as string
return new BufferMemory({

View File

@ -1,5 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } from 'langchain/prompts'
class ChatPromptTemplate_Prompts implements INode {
label: string
@ -18,6 +19,7 @@ class ChatPromptTemplate_Prompts implements INode {
this.icon = 'prompt.svg'
this.category = 'Prompts'
this.description = 'Schema to represent a chat prompt'
this.baseClasses = [this.type, ...getBaseClasses(ChatPromptTemplate)]
this.inputs = [
{
label: 'System Message',
@ -36,13 +38,7 @@ class ChatPromptTemplate_Prompts implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { ChatPromptTemplate } = await import('langchain/prompts')
return getBaseClasses(ChatPromptTemplate)
}
async init(nodeData: INodeData): Promise<any> {
const { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } = await import('langchain/prompts')
const systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string
const humanMessagePrompt = nodeData.inputs?.humanMessagePrompt as string

View File

@ -1,5 +1,8 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getInputVariables } from '../../../src/utils'
import { FewShotPromptTemplate, FewShotPromptTemplateInput, PromptTemplate } from 'langchain/prompts'
import { Example } from 'langchain/schema'
import { TemplateFormat } from 'langchain/dist/prompts/template'
class FewShotPromptTemplate_Prompts implements INode {
label: string
@ -18,6 +21,7 @@ class FewShotPromptTemplate_Prompts implements INode {
this.icon = 'prompt.svg'
this.category = 'Prompts'
this.description = 'Prompt template you can build with examples'
this.baseClasses = [this.type, ...getBaseClasses(FewShotPromptTemplate)]
this.inputs = [
{
label: 'Examples',
@ -32,7 +36,7 @@ class FewShotPromptTemplate_Prompts implements INode {
{
label: 'Example Prompt',
name: 'examplePrompt',
type: 'BasePromptTemplate'
type: 'PromptTemplate'
},
{
label: 'Prefix',
@ -73,27 +77,19 @@ class FewShotPromptTemplate_Prompts implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { FewShotPromptTemplate } = await import('langchain/prompts')
return getBaseClasses(FewShotPromptTemplate)
}
async init(nodeData: INodeData): Promise<any> {
const { FewShotPromptTemplate } = await import('langchain/prompts')
const examplesStr = nodeData.inputs?.examples as string
const prefix = nodeData.inputs?.prefix as string
const suffix = nodeData.inputs?.suffix as string
const exampleSeparator = nodeData.inputs?.exampleSeparator as string
const templateFormat = nodeData.inputs?.templateFormat
const examplePrompt = nodeData.inputs?.examplePrompt
const templateFormat = nodeData.inputs?.templateFormat as TemplateFormat
const examplePrompt = nodeData.inputs?.examplePrompt as PromptTemplate
const inputVariables = getInputVariables(suffix)
const examples = JSON.parse(examplesStr.replace(/\s/g, ''))
const examples: Example[] = JSON.parse(examplesStr.replace(/\s/g, ''))
try {
const prompt = new FewShotPromptTemplate({
const obj: FewShotPromptTemplateInput = {
examples,
examplePrompt,
prefix,
@ -101,7 +97,8 @@ class FewShotPromptTemplate_Prompts implements INode {
inputVariables,
exampleSeparator,
templateFormat
})
}
const prompt = new FewShotPromptTemplate(obj)
return prompt
} catch (e) {
throw new Error(e)

View File

@ -1,5 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getInputVariables } from '../../../src/utils'
import { PromptTemplate, PromptTemplateInput } from 'langchain/prompts'
class PromptTemplate_Prompts implements INode {
label: string
@ -18,6 +19,7 @@ class PromptTemplate_Prompts implements INode {
this.icon = 'prompt.svg'
this.category = 'Prompts'
this.description = 'Schema to represent a basic prompt for an LLM'
this.baseClasses = [this.type, ...getBaseClasses(PromptTemplate)]
this.inputs = [
{
label: 'Template',
@ -29,19 +31,12 @@ class PromptTemplate_Prompts implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { PromptTemplate } = await import('langchain/prompts')
return getBaseClasses(PromptTemplate)
}
async init(nodeData: INodeData): Promise<any> {
const { PromptTemplate } = await import('langchain/prompts')
const template = nodeData.inputs?.template as string
const inputVariables = getInputVariables(template)
try {
const options = {
const options: PromptTemplateInput = {
template,
inputVariables
}

View File

@ -1,5 +1,6 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { RecursiveCharacterTextSplitter, RecursiveCharacterTextSplitterParams } from 'langchain/text_splitter'
class RecursiveCharacterTextSplitter_TextSplitters implements INode {
label: string
@ -18,6 +19,7 @@ class RecursiveCharacterTextSplitter_TextSplitters implements INode {
this.icon = 'textsplitter.svg'
this.category = 'Text Splitters'
this.description = `Split documents recursively by different characters - starting with "\n\n", then "\n", then " "`
this.baseClasses = [this.type, ...getBaseClasses(RecursiveCharacterTextSplitter)]
this.inputs = [
{
label: 'Chunk Size',
@ -35,17 +37,11 @@ class RecursiveCharacterTextSplitter_TextSplitters implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
const { RecursiveCharacterTextSplitter } = await import('langchain/text_splitter')
return getBaseClasses(RecursiveCharacterTextSplitter)
}
async init(nodeData: INodeData): Promise<any> {
const { RecursiveCharacterTextSplitter } = await import('langchain/text_splitter')
const chunkSize = nodeData.inputs?.chunkSize as string
const chunkOverlap = nodeData.inputs?.chunkOverlap as string
const obj = {} as any
const obj = {} as RecursiveCharacterTextSplitterParams
if (chunkSize) obj.chunkSize = parseInt(chunkSize, 10)
if (chunkOverlap) obj.chunkOverlap = parseInt(chunkOverlap, 10)

View File

@ -1,7 +1,8 @@
import { INode } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { Calculator } from 'langchain/tools/calculator'
class Calculator implements INode {
class Calculator_Tools implements INode {
label: string
name: string
description: string
@ -17,17 +18,12 @@ class Calculator implements INode {
this.icon = 'calculator.svg'
this.category = 'Tools'
this.description = 'Perform calculations on response'
}
async getBaseClasses(): Promise<string[]> {
const { Calculator } = await import('langchain/tools')
return getBaseClasses(Calculator)
this.baseClasses = [this.type, ...getBaseClasses(Calculator)]
}
async init(): Promise<any> {
const { Calculator } = await import('langchain/tools')
return new Calculator()
}
}
module.exports = { nodeClass: Calculator }
module.exports = { nodeClass: Calculator_Tools }

View File

@ -0,0 +1,29 @@
import { INode } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { RequestsGetTool } from 'langchain/tools'
class RequestsGet_Tools implements INode {
label: string
name: string
description: string
type: string
icon: string
category: string
baseClasses: string[]
constructor() {
this.label = 'Requests Get'
this.name = 'requestsGet'
this.type = 'RequestsGet'
this.icon = 'requestsget.svg'
this.category = 'Tools'
this.description = 'Execute HTTP GET requests'
this.baseClasses = [this.type, ...getBaseClasses(RequestsGetTool)]
}
async init(): Promise<any> {
return new RequestsGetTool()
}
}
module.exports = { nodeClass: RequestsGet_Tools }
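
Both new request tools are zero-config: no inputs array, just a bare constructor. A sketch of calling one directly, outside an agent (placeholder URL):

import { RequestsGetTool } from 'langchain/tools'

const tool = new RequestsGetTool()
// Tool.call takes the tool input string; here that is the URL to fetch,
// and the resolved value is the response body text.
const body = await tool.call('https://example.com')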

View File

@ -0,0 +1,8 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-http-get" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M7 8h-2a2 2 0 0 0 -2 2v4a2 2 0 0 0 2 2h2v-4h-1"></path>
<path d="M14 8h-4v8h4"></path>
<path d="M10 12h2.5"></path>
<path d="M17 8h4"></path>
<path d="M19 8v8"></path>
</svg>

(new icon added: 487 B)

View File

@ -0,0 +1,29 @@
import { INode } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { RequestsPostTool } from 'langchain/tools'
class RequestsPost_Tools implements INode {
label: string
name: string
description: string
type: string
icon: string
category: string
baseClasses: string[]
constructor() {
this.label = 'Requests Post'
this.name = 'requestsPost'
this.type = 'RequestsPost'
this.icon = 'requestspost.svg'
this.category = 'Tools'
this.description = 'Execute HTTP POST requests'
this.baseClasses = [this.type, ...getBaseClasses(RequestsPostTool)]
}
async init(): Promise<any> {
return new RequestsPostTool()
}
}
module.exports = { nodeClass: RequestsPost_Tools }

View File

@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-http-post" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M3 12h2a2 2 0 1 0 0 -4h-2v8"></path>
<path d="M12 8a2 2 0 0 1 2 2v4a2 2 0 1 1 -4 0v-4a2 2 0 0 1 2 -2z"></path>
<path d="M17 15a1 1 0 0 0 1 1h2a1 1 0 0 0 1 -1v-2a1 1 0 0 0 -1 -1h-2a1 1 0 0 1 -1 -1v-2a1 1 0 0 1 1 -1h2a1 1 0 0 1 1 1"></path>
</svg>

(new icon added: 553 B)

View File

@ -1,7 +1,8 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { SerpAPI } from 'langchain/tools'
class SerpAPI implements INode {
class SerpAPI_Tools implements INode {
label: string
name: string
description: string
@ -25,18 +26,13 @@ class SerpAPI implements INode {
type: 'password'
}
]
}
async getBaseClasses(): Promise<string[]> {
const { SerpAPI } = await import('langchain/tools')
return getBaseClasses(SerpAPI)
this.baseClasses = [this.type, ...getBaseClasses(SerpAPI)]
}
async init(nodeData: INodeData): Promise<any> {
const { SerpAPI } = await import('langchain/tools')
const apiKey = nodeData.inputs?.apiKey as string
return new SerpAPI(apiKey)
}
}
module.exports = { nodeClass: SerpAPI }
module.exports = { nodeClass: SerpAPI_Tools }

View File

@ -1,4 +1,7 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { Chroma } from 'langchain/vectorstores/chroma'
import { Embeddings } from 'langchain/embeddings/base'
import { getBaseClasses } from '../../../src/utils'
class Chroma_Existing_VectorStores implements INode {
label: string
@ -17,6 +20,7 @@ class Chroma_Existing_VectorStores implements INode {
this.icon = 'chroma.svg'
this.category = 'Vector Stores'
this.description = 'Load existing index from Chroma (i.e: Document has been upserted)'
this.baseClasses = [this.type, ...getBaseClasses(Chroma)]
this.inputs = [
{
label: 'Embeddings',
@ -31,15 +35,9 @@ class Chroma_Existing_VectorStores implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['BaseRetriever']
}
async init(nodeData: INodeData): Promise<any> {
const { Chroma } = await import('langchain/vectorstores')
const collectionName = nodeData.inputs?.collectionName as string
const embeddings = nodeData.inputs?.embeddings
const embeddings = nodeData.inputs?.embeddings as Embeddings
const vectorStore = await Chroma.fromExistingCollection(embeddings, {
collectionName

View File

@ -1,4 +1,8 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { Chroma } from 'langchain/vectorstores/chroma'
import { Embeddings } from 'langchain/embeddings/base'
import { Document } from 'langchain/document'
import { getBaseClasses } from '../../../src/utils'
class ChromaUpsert_VectorStores implements INode {
label: string
@ -17,6 +21,7 @@ class ChromaUpsert_VectorStores implements INode {
this.icon = 'chroma.svg'
this.category = 'Vector Stores'
this.description = 'Upsert documents to Chroma'
this.baseClasses = [this.type, ...getBaseClasses(Chroma)]
this.inputs = [
{
label: 'Document',
@ -36,28 +41,20 @@ class ChromaUpsert_VectorStores implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['BaseRetriever']
}
async init(nodeData: INodeData): Promise<any> {
const { Chroma } = await import('langchain/vectorstores')
const { Document } = await import('langchain/document')
const collectionName = nodeData.inputs?.collectionName as string
const docs = nodeData.inputs?.document
const embeddings = nodeData.inputs?.embeddings
const docs = nodeData.inputs?.document as Document[]
const embeddings = nodeData.inputs?.embeddings as Embeddings
const finalDocs = []
for (let i = 0; i < docs.length; i += 1) {
finalDocs.push(new Document(docs[i]))
}
const result = await Chroma.fromDocuments(finalDocs, embeddings, {
const vectorStore = await Chroma.fromDocuments(finalDocs, embeddings, {
collectionName
})
const retriever = result.asRetriever()
const retriever = vectorStore.asRetriever()
return retriever
}
}

View File

@ -1,5 +1,8 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { PineconeClient } from '@pinecone-database/pinecone'
import { PineconeStore } from 'langchain/vectorstores/pinecone'
import { Embeddings } from 'langchain/embeddings/base'
import { getBaseClasses } from '../../../src/utils'
class Pinecone_Existing_VectorStores implements INode {
label: string
@ -18,6 +21,7 @@ class Pinecone_Existing_VectorStores implements INode {
this.icon = 'pinecone.png'
this.category = 'Vector Stores'
this.description = 'Load existing index from Pinecone (i.e: Document has been upserted)'
this.baseClasses = [this.type, ...getBaseClasses(PineconeStore)]
this.inputs = [
{
label: 'Embeddings',
@ -42,17 +46,11 @@ class Pinecone_Existing_VectorStores implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['BaseRetriever']
}
async init(nodeData: INodeData): Promise<any> {
const { PineconeStore } = await import('langchain/vectorstores')
const pineconeApiKey = nodeData.inputs?.pineconeApiKey as string
const pineconeEnv = nodeData.inputs?.pineconeEnv as string
const index = nodeData.inputs?.pineconeIndex as string
const embeddings = nodeData.inputs?.embeddings
const embeddings = nodeData.inputs?.embeddings as Embeddings
const client = new PineconeClient()
await client.init({

View File

@ -1,5 +1,9 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { PineconeClient } from '@pinecone-database/pinecone'
import { PineconeStore } from 'langchain/vectorstores/pinecone'
import { Embeddings } from 'langchain/embeddings/base'
import { Document } from 'langchain/document'
import { getBaseClasses } from '../../../src/utils'
class PineconeUpsert_VectorStores implements INode {
label: string
@ -18,6 +22,7 @@ class PineconeUpsert_VectorStores implements INode {
this.icon = 'pinecone.png'
this.category = 'Vector Stores'
this.description = 'Upsert documents to Pinecone'
this.baseClasses = [this.type, ...getBaseClasses(PineconeStore)]
this.inputs = [
{
label: 'Document',
@ -47,19 +52,12 @@ class PineconeUpsert_VectorStores implements INode {
]
}
async getBaseClasses(): Promise<string[]> {
return ['BaseRetriever']
}
async init(nodeData: INodeData): Promise<any> {
const { PineconeStore } = await import('langchain/vectorstores')
const { Document } = await import('langchain/document')
const pineconeApiKey = nodeData.inputs?.pineconeApiKey as string
const pineconeEnv = nodeData.inputs?.pineconeEnv as string
const index = nodeData.inputs?.pineconeIndex as string
const docs = nodeData.inputs?.document
const embeddings = nodeData.inputs?.embeddings
const docs = nodeData.inputs?.document as Document[]
const embeddings = nodeData.inputs?.embeddings as Embeddings
const client = new PineconeClient()
await client.init({
@ -74,11 +72,10 @@ class PineconeUpsert_VectorStores implements INode {
finalDocs.push(new Document(docs[i]))
}
const result = await PineconeStore.fromDocuments(finalDocs, embeddings, {
const vectorStore = await PineconeStore.fromDocuments(finalDocs, embeddings, {
pineconeIndex
})
const retriever = result.asRetriever()
const retriever = vectorStore.asRetriever()
return retriever
}
}

View File

@ -24,7 +24,7 @@
"dotenv": "^16.0.0",
"express": "^4.17.3",
"form-data": "^4.0.0",
"langchain": "^0.0.44",
"langchain": "^0.0.53",
"moment": "^2.29.3",
"node-fetch": "2",
"pdfjs-dist": "^3.5.141",

View File

@ -75,7 +75,6 @@ export interface INodeProperties {
export interface INode extends INodeProperties {
inputs?: INodeParams[]
getBaseClasses?(): Promise<string[]>
getInstance?(nodeData: INodeData): Promise<string>
run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<string>
}

View File

@ -14,8 +14,7 @@
"strictPropertyInitialization": false,
"useUnknownInCatchVariables": false,
"declaration": true,
"module": "commonjs",
"moduleResolution": "node16"
"module": "commonjs"
},
"include": ["src", "nodes"]
}

View File

@ -3,11 +3,58 @@
"nodes": [
{
"width": 300,
"height": 885,
"height": 360,
"id": "promptTemplate_0",
"position": {
"x": 294.38456937448433,
"y": 66.5400435451831
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 5,
"placeholder": "What is a good name for a company that makes {product}?"
}
],
"inputAnchors": [],
"inputs": {
"template": "Word: {word}\\nAntonym: {antonym}\\n"
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 294.38456937448433,
"y": 66.5400435451831
},
"dragging": false
},
{
"width": 300,
"height": 886,
"id": "fewShotPromptTemplate_0",
"position": {
"x": 495.78246013667433,
"y": 168.3684510250569
"x": 719.2200337843097,
"y": 67.20405755860693
},
"type": "customNode",
"data": {
@ -15,7 +62,7 @@
"label": "Few Shot Prompt Template",
"name": "fewShotPromptTemplate",
"type": "FewShotPromptTemplate",
"baseClasses": ["BaseStringPromptTemplate", "BasePromptTemplate"],
"baseClasses": ["FewShotPromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Prompt template you can build with examples",
"inputParams": [
@ -24,7 +71,7 @@
"name": "examples",
"type": "string",
"rows": 5,
"placeholder": "[\n { word: \"happy\", antonym: \"sad\" },\n { word: \"tall\", antonym: \"short\" },\n]"
"placeholder": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" },\n]"
},
{
"label": "Prefix",
@ -67,12 +114,12 @@
{
"label": "Example Prompt",
"name": "examplePrompt",
"type": "BasePromptTemplate",
"id": "fewShotPromptTemplate_0-input-examplePrompt-BasePromptTemplate"
"type": "PromptTemplate",
"id": "fewShotPromptTemplate_0-input-examplePrompt-PromptTemplate"
}
],
"inputs": {
"examples": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" }\n]",
"examples": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" }\n]",
"examplePrompt": "{{promptTemplate_0.data.instance}}",
"prefix": "Give the antonym of every input",
"suffix": "Word: {input}\\nAntonym:",
@ -81,128 +128,28 @@
},
"outputAnchors": [
{
"id": "fewShotPromptTemplate_0-output-fewShotPromptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"id": "fewShotPromptTemplate_0-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "fewShotPromptTemplate",
"label": "FewShotPromptTemplate",
"type": "BaseStringPromptTemplate | BasePromptTemplate"
"type": "FewShotPromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 495.78246013667433,
"y": 168.3684510250569
"x": 719.2200337843097,
"y": 67.20405755860693
},
"dragging": false
},
{
"width": 300,
"height": 359,
"id": "promptTemplate_0",
"position": {
"x": 13.229214123006699,
"y": 171.79555808656028
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 5,
"placeholder": "What is a good name for a company that makes {product}?"
}
],
"inputAnchors": [],
"inputs": {
"template": "Word: {word}\\nAntonym: {antonym}\\n"
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "BaseStringPromptTemplate | BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 13.229214123006699,
"y": 171.79555808656028
},
"dragging": false
},
{
"width": 300,
"height": 279,
"id": "llmChain_0",
"position": {
"x": 1237.4411644942688,
"y": 508.82448993622904
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-llm-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"prompt": "{{fewShotPromptTemplate_0.data.instance}}"
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1237.4411644942688,
"y": 508.82448993622904
},
"dragging": false
},
{
"width": 300,
"height": 471,
"height": 472,
"id": "openAI_0",
"position": {
"x": 859.220671981777,
"y": 166.25170842824588
"x": 1089.6434062122398,
"y": 27.515288538129425
},
"type": "customNode",
"data": {
@ -210,7 +157,7 @@
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
@ -259,18 +206,81 @@
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 859.220671981777,
"y": 166.25170842824588
"x": 1089.6434062122398,
"y": 27.515288538129425
},
"dragging": false
},
{
"width": 300,
"height": 461,
"id": "llmChain_0",
"position": {
"x": 1499.2654451385026,
"y": 356.3275374721362
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{openAI_0.data.instance}}",
"prompt": "{{fewShotPromptTemplate_0.data.instance}}",
"promptValues": ""
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "LLMChain | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1499.2654451385026,
"y": 356.3275374721362
},
"dragging": false
}
@ -278,33 +288,33 @@
"edges": [
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "fewShotPromptTemplate_0",
"targetHandle": "fewShotPromptTemplate_0-input-examplePrompt-BasePromptTemplate",
"targetHandle": "fewShotPromptTemplate_0-input-examplePrompt-PromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate-fewShotPromptTemplate_0-fewShotPromptTemplate_0-input-examplePrompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "fewShotPromptTemplate_0",
"sourceHandle": "fewShotPromptTemplate_0-output-fewShotPromptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "fewShotPromptTemplate_0-fewShotPromptTemplate_0-output-fewShotPromptTemplate-BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-fewShotPromptTemplate_0-fewShotPromptTemplate_0-input-examplePrompt-PromptTemplate",
"data": {
"label": ""
}
},
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-llm-BaseLanguageModel",
"targetHandle": "llmChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-llm-BaseLanguageModel",
"id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "fewShotPromptTemplate_0",
"sourceHandle": "fewShotPromptTemplate_0-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "fewShotPromptTemplate_0-fewShotPromptTemplate_0-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
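
The id churn across these marketplace files follows one convention: output anchor ids embed the node's full baseClasses chain, so prepending each concrete class (PromptTemplate, FewShotPromptTemplate, OpenAI, LLMChain) changes every anchor id and every edge id built from them. A sketch of the assumed format:

// Assumed anchor-id convention, inferred from the ids visible above:
const nodeId = 'openAI_0'
const outputName = 'openAI'
const baseClasses = ['OpenAI', 'BaseLLM', 'BaseLanguageModel']
const outputAnchorId = `${nodeId}-output-${outputName}-${baseClasses.join('|')}`
// -> 'openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel'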

View File

@ -3,11 +3,11 @@
"nodes": [
{
"width": 300,
"height": 277,
"height": 278,
"id": "serpAPI_0",
"position": {
"x": 738.3791942291381,
"y": 61.26790912730354
"x": 509.0449726750261,
"y": 40.29354052812607
},
"type": "customNode",
"data": {
@ -15,7 +15,7 @@
"label": "Serp API",
"name": "serpAPI",
"type": "SerpAPI",
"baseClasses": ["Tool"],
"baseClasses": ["SerpAPI", "Tool"],
"category": "Tools",
"description": "Wrapper around SerpAPI - a real-time API to access Google search results",
"inputParams": [
@ -29,28 +29,28 @@
"inputs": {},
"outputAnchors": [
{
"id": "serpAPI_0-output-serpAPI-Tool",
"id": "serpAPI_0-output-serpAPI-SerpAPI|Tool",
"name": "serpAPI",
"label": "SerpAPI",
"type": "Tool"
"type": "SerpAPI | Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 738.3791942291381,
"y": 61.26790912730354
"x": 509.0449726750261,
"y": 40.29354052812607
},
"dragging": false
},
{
"width": 300,
"height": 142,
"height": 143,
"id": "calculator_0",
"position": {
"x": 1088.946090950564,
"y": 63.99579982092973
"x": 877.5497970197198,
"y": 59.162476101133734
},
"type": "customNode",
"data": {
@ -58,7 +58,7 @@
"label": "Calculator",
"name": "calculator",
"type": "Calculator",
"baseClasses": ["Tool"],
"baseClasses": ["Calculator", "Tool"],
"category": "Tools",
"description": "Perform calculations on response",
"inputParams": [],
@ -66,28 +66,28 @@
"inputs": {},
"outputAnchors": [
{
"id": "calculator_0-output-calculator-Tool",
"id": "calculator_0-output-calculator-Calculator|Tool",
"name": "calculator",
"label": "Calculator",
"type": "Tool"
"type": "Calculator | Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1088.946090950564,
"y": 63.99579982092973
"x": 877.5497970197198,
"y": 59.162476101133734
},
"dragging": false
},
{
"width": 300,
"height": 471,
"height": 472,
"id": "chatOpenAI_0",
"position": {
"x": 741.0274881835038,
"y": 365.0891876953251
"x": 451.9774324962526,
"y": 370.86893557300755
},
"type": "customNode",
"data": {
@ -95,7 +95,7 @@
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["BaseChatModel", "BaseLanguageModel"],
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
@ -109,6 +109,18 @@
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0314",
"name": "gpt-4-0314"
},
{
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@ -136,18 +148,18 @@
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel",
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "BaseChatModel | BaseLanguageModel"
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 741.0274881835038,
"y": 365.0891876953251
"x": 451.9774324962526,
"y": 370.86893557300755
},
"dragging": false
},
@ -156,8 +168,8 @@
"height": 376,
"id": "bufferMemory_0",
"position": {
"x": 753.3628847860326,
"y": 864.8446075184364
"x": 823.1867811443266,
"y": 704.7589374803455
},
"type": "customNode",
"data": {
@ -165,7 +177,7 @@
"label": "Buffer Memory",
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BaseChatMemory", "BaseMemory"],
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Perform calculations on response",
"inputParams": [
@ -189,28 +201,28 @@
},
"outputAnchors": [
{
"id": "bufferMemory_0-output-bufferMemory-BaseChatMemory|BaseMemory",
"id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"name": "bufferMemory",
"label": "BufferMemory",
"type": "BaseChatMemory | BaseMemory"
"type": "BufferMemory | BaseChatMemory | BaseMemory"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 753.3628847860326,
"y": 864.8446075184364
"x": 823.1867811443266,
"y": 704.7589374803455
},
"dragging": false
},
{
"width": 300,
"height": 330,
"height": 331,
"id": "conversationalAgent_0",
"position": {
"x": 1487.0651648211865,
"y": 497.1658250180486
"x": 1257.706443811743,
"y": 335.10277854416955
},
"type": "customNode",
"data": {
@ -218,7 +230,7 @@
"label": "Conversational Agent",
"name": "conversationalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor"],
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [],
@ -250,18 +262,18 @@
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor",
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor"
"type": "AgentExecutor | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1487.0651648211865,
"y": 497.1658250180486
"x": 1257.706443811743,
"y": 335.10277854416955
},
"dragging": false
}
@ -269,44 +281,44 @@
"edges": [
{
"source": "calculator_0",
"sourceHandle": "calculator_0-output-calculator-Tool",
"sourceHandle": "calculator_0-output-calculator-Calculator|Tool",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "calculator_0-calculator_0-output-calculator-Tool-conversationalAgent_0-conversationalAgent_0-input-tools-Tool",
"id": "calculator_0-calculator_0-output-calculator-Calculator|Tool-conversationalAgent_0-conversationalAgent_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "serpAPI_0",
"sourceHandle": "serpAPI_0-output-serpAPI-Tool",
"sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "serpAPI_0-serpAPI_0-output-serpAPI-Tool-conversationalAgent_0-conversationalAgent_0-input-tools-Tool",
"id": "serpAPI_0-serpAPI_0-output-serpAPI-SerpAPI|Tool-conversationalAgent_0-conversationalAgent_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BaseChatMemory|BaseMemory",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory",
"type": "buttonedge",
"id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory",
"id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory",
"data": {
"label": ""
}

View File

@ -6,8 +6,8 @@
"height": 376,
"id": "recursiveCharacterTextSplitter_0",
"position": {
"x": 542.7867965644035,
"y": 239.47308806541884
"x": 483.2031495359837,
"y": 208.70988551611597
},
"type": "customNode",
"data": {
@ -15,7 +15,7 @@
"label": "Recursive Character Text Splitter",
"name": "recursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter",
"baseClasses": ["TextSplitter"],
"baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"],
"category": "Text Splitters",
"description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"",
"inputParams": [
@ -40,35 +40,35 @@
},
"outputAnchors": [
{
"id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter",
"id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"name": "recursiveCharacterTextSplitter",
"label": "RecursiveCharacterTextSplitter",
"type": "TextSplitter"
"type": "RecursiveCharacterTextSplitter | TextSplitter"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 542.7867965644035,
"y": 239.47308806541884
"x": 483.2031495359837,
"y": 208.70988551611597
},
"dragging": false
},
{
"width": 300,
"height": 358,
"height": 359,
"id": "textFile_0",
"position": {
"x": 928.3774169979697,
"y": 473.8284271247462
"x": 858.2160149124683,
"y": 215.870325304158
},
"type": "customNode",
"data": {
"id": "textFile_0",
"label": "Text File",
"name": "textFile",
"type": "Text",
"type": "Document",
"baseClasses": ["Document"],
"category": "Document Loaders",
"description": "Load data from text files",
@ -96,7 +96,7 @@
{
"id": "textFile_0-output-textFile-Document",
"name": "textFile",
"label": "Text",
"label": "Document",
"type": "Document"
}
],
@ -104,61 +104,18 @@
},
"selected": false,
"positionAbsolute": {
"x": 928.3774169979697,
"y": 473.8284271247462
"x": 858.2160149124683,
"y": 215.870325304158
},
"dragging": false
},
{
"width": 300,
"height": 277,
"id": "openAIEmbeddings_0",
"position": {
"x": 924.7825209307788,
"y": 870.671152679558
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "Embeddings"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 924.7825209307788,
"y": 870.671152679558
},
"dragging": false
},
{
"width": 300,
"height": 471,
"height": 472,
"id": "openAI_0",
"position": {
"x": 1296.7206878349027,
"y": 167.80701218012993
"x": 1207.112878089014,
"y": 19.892224585997383
},
"type": "customNode",
"data": {
@ -166,7 +123,7 @@
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
@ -215,28 +172,71 @@
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1296.7206878349027,
"y": 167.80701218012993
"x": 1207.112878089014,
"y": 19.892224585997383
},
"dragging": false
},
{
"width": 300,
"height": 576,
"height": 278,
"id": "openAIEmbeddings_0",
"position": {
"x": 758.2670802362803,
"y": 635.7886850619154
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["OpenAIEmbeddings", "Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "OpenAIEmbeddings | Embeddings"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 758.2670802362803,
"y": 635.7886850619154
},
"dragging": false
},
{
"width": 300,
"height": 577,
"id": "pineconeUpsert_0",
"position": {
"x": 1293.8922607101565,
"y": 692.4802438205481
"x": 1212.220130988712,
"y": 526.8130243230098
},
"type": "customNode",
"data": {
@ -296,18 +296,18 @@
},
"selected": false,
"positionAbsolute": {
"x": 1293.8922607101565,
"y": 692.4802438205481
"x": 1212.220130988712,
"y": 526.8130243230098
},
"dragging": false
},
{
"width": 300,
"height": 279,
"height": 280,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 1742.1979599824272,
"y": 607.6274300781624
"x": 1608.0332939239609,
"y": 410.3973881655837
},
"type": "customNode",
"data": {
@ -315,7 +315,7 @@
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["BaseChain"],
"baseClasses": ["ConversationalRetrievalQAChain", "BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [],
@ -323,8 +323,8 @@
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel"
"type": "BaseLLM",
"id": "conversationalRetrievalQAChain_0-input-llm-BaseLLM"
},
{
"label": "Vector Store Retriever",
@ -339,18 +339,18 @@
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-BaseChain",
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "BaseChain"
"type": "ConversationalRetrievalQAChain | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1742.1979599824272,
"y": 607.6274300781624
"x": 1608.0332939239609,
"y": 410.3973881655837
},
"dragging": false
}
@ -358,33 +358,11 @@
"edges": [
{
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"target": "textFile_0",
"targetHandle": "textFile_0-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter-textFile_0-textFile_0-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
},
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-pineconeUpsert-BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "pineconeUpsert_0-pineconeUpsert_0-output-pineconeUpsert-BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-textFile_0-textFile_0-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
@ -402,11 +380,33 @@
},
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-Embeddings",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
},
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-llm-BaseLLM",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-llm-BaseLLM",
"data": {
"label": ""
}
},
{
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-pineconeUpsert-BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "pineconeUpsert_0-pineconeUpsert_0-output-pineconeUpsert-BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}

View File

@ -3,64 +3,177 @@
"nodes": [
{
"width": 300,
"height": 279,
"id": "conversationalRetrievalQAChain_0",
"height": 376,
"id": "recursiveCharacterTextSplitter_0",
"position": {
"x": 1557.4588265034258,
"y": 314.1114881397983
"x": 515.0218130437521,
"y": 133.69013788278536
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [],
"inputAnchors": [
"id": "recursiveCharacterTextSplitter_0",
"label": "Recursive Character Text Splitter",
"name": "recursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter",
"baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"],
"category": "Text Splitters",
"description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"",
"inputParams": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel"
"label": "Chunk Size",
"name": "chunkSize",
"type": "number",
"default": 1000,
"optional": true
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
"label": "Chunk Overlap",
"name": "chunkOverlap",
"type": "number",
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}"
"chunkSize": 1000,
"chunkOverlap": ""
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "BaseChain"
"id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"name": "recursiveCharacterTextSplitter",
"label": "RecursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter | TextSplitter"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1557.4588265034258,
"y": 314.1114881397983
"x": 515.0218130437521,
"y": 133.69013788278536
},
"dragging": false
},
{
"width": 300,
"height": 471,
"height": 526,
"id": "github_0",
"position": {
"x": 900.6064873076141,
"y": 12.699982761973843
},
"type": "customNode",
"data": {
"id": "github_0",
"label": "Github",
"name": "github",
"type": "Document",
"baseClasses": ["Document"],
"category": "Document Loaders",
"description": "Load data from a GitHub repository",
"inputParams": [
{
"label": "Repo Link",
"name": "repoLink",
"type": "string",
"placeholder": "https://github.com/FlowiseAI/Flowise"
},
{
"label": "Branch",
"name": "branch",
"type": "string",
"default": "main"
},
{
"label": "Access Token",
"name": "accessToken",
"type": "password",
"placeholder": "<GITHUB_ACCESS_TOKEN>",
"optional": true
}
],
"inputAnchors": [
{
"label": "Text Splitter",
"name": "textSplitter",
"type": "TextSplitter",
"optional": true,
"id": "github_0-input-textSplitter-TextSplitter"
}
],
"inputs": {
"repoLink": "https://github.com/kyrolabs/awesome-langchain",
"branch": "main",
"textSplitter": "{{recursiveCharacterTextSplitter_0.data.instance}}"
},
"outputAnchors": [
{
"id": "github_0-output-github-Document",
"name": "github",
"label": "Document",
"type": "Document"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 900.6064873076141,
"y": 12.699982761973843
},
"dragging": false
},
{
"width": 300,
"height": 278,
"id": "openAIEmbeddings_0",
"position": {
"x": 904.1187025903466,
"y": 591.9520266659382
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["OpenAIEmbeddings", "Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "OpenAIEmbeddings | Embeddings"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 904.1187025903466,
"y": 591.9520266659382
},
"dragging": false
},
{
"width": 300,
"height": 472,
"id": "openAI_0",
"position": {
"x": 1079.79101466888,
"y": -178.54116849152098
"x": 1268.0020638568344,
"y": -134.40583642072306
},
"type": "customNode",
"data": {
@ -68,7 +181,7 @@
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
@ -113,198 +226,32 @@
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": "0"
"temperature": 0.7
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1079.79101466888,
"y": -178.54116849152098
"x": 1268.0020638568344,
"y": -134.40583642072306
},
"dragging": false
},
{
"width": 300,
"height": 376,
"id": "recursiveCharacterTextSplitter_0",
"position": {
"x": 235.6130554027991,
"y": -89.82544163833616
},
"type": "customNode",
"data": {
"id": "recursiveCharacterTextSplitter_0",
"label": "Recursive Character Text Splitter",
"name": "recursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter",
"baseClasses": ["TextSplitter"],
"category": "Text Splitters",
"description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"",
"inputParams": [
{
"label": "Chunk Size",
"name": "chunkSize",
"type": "number",
"default": 1000,
"optional": true
},
{
"label": "Chunk Overlap",
"name": "chunkOverlap",
"type": "number",
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"chunkSize": 1000,
"chunkOverlap": ""
},
"outputAnchors": [
{
"id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter",
"name": "recursiveCharacterTextSplitter",
"label": "RecursiveCharacterTextSplitter",
"type": "TextSplitter"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 235.6130554027991,
"y": -89.82544163833616
},
"dragging": false
},
{
"width": 300,
"height": 277,
"id": "openAIEmbeddings_0",
"position": {
"x": 654.473220763302,
"y": 508.09797567725514
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "Embeddings"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 654.473220763302,
"y": 508.09797567725514
},
"dragging": false
},
{
"width": 300,
"height": 525,
"id": "github_0",
"position": {
"x": 649.303910738229,
"y": -93.90566658624722
},
"type": "customNode",
"data": {
"id": "github_0",
"label": "Github",
"name": "github",
"type": "Github",
"baseClasses": ["Document"],
"category": "Document Loaders",
"description": "Load data from a GitHub repository",
"inputParams": [
{
"label": "Repo Link",
"name": "repoLink",
"type": "string",
"placeholder": "https://github.com/FlowiseAI/Flowise"
},
{
"label": "Branch",
"name": "branch",
"type": "string",
"default": "main"
},
{
"label": "Access Token",
"name": "accessToken",
"type": "password",
"placeholder": "<GITHUB_ACCESS_TOKEN>",
"optional": true
}
],
"inputAnchors": [
{
"label": "Text Splitter",
"name": "textSplitter",
"type": "TextSplitter",
"optional": true,
"id": "github_0-input-textSplitter-TextSplitter"
}
],
"inputs": {
"repoLink": "https://github.com/kyrolabs/awesome-langchain",
"branch": "main",
"textSplitter": "{{recursiveCharacterTextSplitter_0.data.instance}}"
},
"outputAnchors": [
{
"id": "github_0-output-github-Document",
"name": "github",
"label": "Github",
"type": "Document"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 649.303910738229,
"y": -93.90566658624722
},
"dragging": false
},
{
"width": 300,
"height": 576,
"height": 577,
"id": "pineconeUpsert_0",
"position": {
"x": 1089.3652950320754,
"y": 354.9656606763275
"x": 1265.1304547629002,
"y": 376.13121569675315
},
"type": "customNode",
"data": {
@ -364,8 +311,61 @@
},
"selected": false,
"positionAbsolute": {
"x": 1089.3652950320754,
"y": 354.9656606763275
"x": 1265.1304547629002,
"y": 376.13121569675315
},
"dragging": false
},
{
"width": 300,
"height": 280,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 1658.7012040564862,
"y": 197.0636463189023
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["ConversationalRetrievalQAChain", "BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLLM",
"id": "conversationalRetrievalQAChain_0-input-llm-BaseLLM"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}"
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1658.7012040564862,
"y": 197.0636463189023
},
"dragging": false
}
@ -373,22 +373,33 @@
"edges": [
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel",
"targetHandle": "conversationalRetrievalQAChain_0-input-llm-BaseLLM",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel",
"id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-llm-BaseLLM",
"data": {
"label": ""
}
},
{
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter",
"target": "github_0",
"targetHandle": "github_0-input-textSplitter-TextSplitter",
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-pineconeUpsert-BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter-github_0-github_0-input-textSplitter-TextSplitter",
"id": "pineconeUpsert_0-pineconeUpsert_0-output-pineconeUpsert-BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}
},
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
@ -405,23 +416,12 @@
}
},
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-Embeddings",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings",
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"target": "github_0",
"targetHandle": "github_0-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
},
{
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-pineconeUpsert-BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "pineconeUpsert_0-pineconeUpsert_0-output-pineconeUpsert-BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-github_0-github_0-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
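The inputs in these flows reference upstream nodes with mustache-style placeholders such as {{recursiveCharacterTextSplitter_0.data.instance}}, presumably swapped for the initialised node instances when the server builds the flow. A minimal sketch of that substitution, with the instances map assumed:

// Sketch only: resolve a '{{nodeId.data.instance}}' reference against already
// initialised nodes. The instances map and this exact resolution are assumptions.
const resolveInstanceRef = (value: string, instances: Record<string, unknown>): unknown => {
    const match = /^\{\{(.+?)\.data\.instance\}\}$/.exec(value)
    return match ? instances[match[1]] : value
}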

View File

@ -3,102 +3,11 @@
"nodes": [
{
"width": 300,
"height": 279,
"id": "mrlkAgentLLM_0",
"position": {
"x": 1520.156054894558,
"y": 466.34196346475386
},
"type": "customNode",
"data": {
"id": "mrlkAgentLLM_0",
"label": "MRLK Agent for LLMs",
"name": "mrlkAgentLLM",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor"],
"category": "Agents",
"description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs",
"inputParams": [],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "mrlkAgentLLM_0-input-tools-Tool"
},
{
"label": "LLM Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "mrlkAgentLLM_0-input-model-BaseLanguageModel"
}
],
"inputs": {
"tools": ["{{calculator_0.data.instance}}", "{{serpAPI_0.data.instance}}"],
"model": "{{openAI_0.data.instance}}"
},
"outputAnchors": [
{
"id": "mrlkAgentLLM_0-output-mrlkAgentLLM-AgentExecutor",
"name": "mrlkAgentLLM",
"label": "AgentExecutor",
"type": "AgentExecutor"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1520.156054894558,
"y": 466.34196346475386
},
"dragging": false
},
{
"width": 300,
"height": 142,
"id": "calculator_0",
"position": {
"x": 1141.0497522733922,
"y": 172.32224599434292
},
"type": "customNode",
"data": {
"id": "calculator_0",
"label": "Calculator",
"name": "calculator",
"type": "Calculator",
"baseClasses": ["Tool"],
"category": "Tools",
"description": "Perform calculations on response",
"inputParams": [],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "calculator_0-output-calculator-Tool",
"name": "calculator",
"label": "Calculator",
"type": "Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1141.0497522733922,
"y": 172.32224599434292
},
"dragging": false
},
{
"width": 300,
"height": 277,
"height": 278,
"id": "serpAPI_0",
"position": {
"x": 797.0350733284566,
"y": 199.36655049779267
"x": 571.585786437627,
"y": 127.14213562373095
},
"type": "customNode",
"data": {
@ -106,7 +15,7 @@
"label": "Serp API",
"name": "serpAPI",
"type": "SerpAPI",
"baseClasses": ["Tool"],
"baseClasses": ["SerpAPI", "Tool"],
"category": "Tools",
"description": "Wrapper around SerpAPI - a real-time API to access Google search results",
"inputParams": [
@ -120,28 +29,119 @@
"inputs": {},
"outputAnchors": [
{
"id": "serpAPI_0-output-serpAPI-Tool",
"id": "serpAPI_0-output-serpAPI-SerpAPI|Tool",
"name": "serpAPI",
"label": "SerpAPI",
"type": "Tool"
"type": "SerpAPI | Tool"
}
],
"selected": false
},
"positionAbsolute": {
"x": 571.585786437627,
"y": 127.14213562373095
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 143,
"id": "calculator_0",
"position": {
"x": 904.7519886598635,
"y": 135.4705627484772
},
"type": "customNode",
"data": {
"id": "calculator_0",
"label": "Calculator",
"name": "calculator",
"type": "Calculator",
"baseClasses": ["Calculator", "Tool"],
"category": "Tools",
"description": "Perform calculations on response",
"inputParams": [],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "calculator_0-output-calculator-Calculator|Tool",
"name": "calculator",
"label": "Calculator",
"type": "Calculator | Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 797.0350733284566,
"y": 199.36655049779267
"x": 904.7519886598635,
"y": 135.4705627484772
},
"dragging": false
},
{
"width": 300,
"height": 471,
"height": 280,
"id": "mrklAgentLLM_0",
"position": {
"x": 1251.3621617151298,
"y": 323.8137084989848
},
"type": "customNode",
"data": {
"id": "mrklAgentLLM_0",
"label": "MRKL Agent for LLMs",
"name": "mrklAgentLLM",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
"description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs",
"inputParams": [],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "mrklAgentLLM_0-input-tools-Tool"
},
{
"label": "LLM Model",
"name": "model",
"type": "BaseLLM",
"id": "mrklAgentLLM_0-input-model-BaseLLM"
}
],
"inputs": {
"tools": ["{{calculator_0.data.instance}}", "{{serpAPI_0.data.instance}}"],
"model": "{{openAI_0.data.instance}}"
},
"outputAnchors": [
{
"id": "mrklAgentLLM_0-output-mrklAgentLLM-AgentExecutor|BaseChain",
"name": "mrklAgentLLM",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1251.3621617151298,
"y": 323.8137084989848
},
"dragging": false
},
{
"width": 300,
"height": 472,
"id": "openAI_0",
"position": {
"x": 917.6484006031452,
"y": 522.1507882519595
"x": 708.9235615351174,
"y": 438.84314575050763
},
"type": "customNode",
"data": {
@ -149,7 +149,7 @@
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
@ -198,52 +198,52 @@
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 917.6484006031452,
"y": 522.1507882519595
"x": 708.9235615351174,
"y": 438.84314575050763
},
"selected": false,
"dragging": false
}
],
"edges": [
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"target": "mrlkAgentLLM_0",
"targetHandle": "mrlkAgentLLM_0-input-model-BaseLanguageModel",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "mrklAgentLLM_0",
"targetHandle": "mrklAgentLLM_0-input-model-BaseLLM",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-mrlkAgentLLM_0-mrlkAgentLLM_0-input-model-BaseLanguageModel",
"id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-mrklAgentLLM_0-mrklAgentLLM_0-input-model-BaseLLM",
"data": {
"label": ""
}
},
{
"source": "calculator_0",
"sourceHandle": "calculator_0-output-calculator-Tool",
"target": "mrlkAgentLLM_0",
"targetHandle": "mrlkAgentLLM_0-input-tools-Tool",
"sourceHandle": "calculator_0-output-calculator-Calculator|Tool",
"target": "mrklAgentLLM_0",
"targetHandle": "mrklAgentLLM_0-input-tools-Tool",
"type": "buttonedge",
"id": "calculator_0-calculator_0-output-calculator-Tool-mrlkAgentLLM_0-mrlkAgentLLM_0-input-tools-Tool",
"id": "calculator_0-calculator_0-output-calculator-Calculator|Tool-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "serpAPI_0",
"sourceHandle": "serpAPI_0-output-serpAPI-Tool",
"target": "mrlkAgentLLM_0",
"targetHandle": "mrlkAgentLLM_0-input-tools-Tool",
"sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool",
"target": "mrklAgentLLM_0",
"targetHandle": "mrklAgentLLM_0-input-tools-Tool",
"type": "buttonedge",
"id": "serpAPI_0-serpAPI_0-output-serpAPI-Tool-mrlkAgentLLM_0-mrlkAgentLLM_0-input-tools-Tool",
"id": "serpAPI_0-serpAPI_0-output-serpAPI-SerpAPI|Tool-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool",
"data": {
"label": ""
}

View File

@ -3,74 +3,11 @@
"nodes": [
{
"width": 300,
"height": 460,
"id": "llmChain_0",
"position": {
"x": 1515.563392772433,
"y": 334.61150897841924
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
}
],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-llm-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"promptValues": ""
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1515.563392772433,
"y": 334.61150897841924
},
"dragging": false
},
{
"width": 300,
"height": 471,
"height": 472,
"id": "openAI_0",
"position": {
"x": 954.7026430819806,
"y": 6.975032607064918
"x": 968.1753795547951,
"y": -8.62176310944858
},
"type": "customNode",
"data": {
@ -78,7 +15,7 @@
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
@ -127,28 +64,28 @@
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 954.7026430819806,
"y": 6.975032607064918
"x": 968.1753795547951,
"y": -8.62176310944858
},
"dragging": false
},
{
"width": 300,
"height": 359,
"height": 360,
"id": "promptTemplate_0",
"position": {
"x": 954.1542757936061,
"y": 515.2247261712328
"x": 970.576876549135,
"y": 502.493937944275
},
"type": "customNode",
"data": {
@ -156,7 +93,7 @@
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["BaseStringPromptTemplate", "BasePromptTemplate"],
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
@ -174,41 +111,104 @@
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "BaseStringPromptTemplate | BasePromptTemplate"
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 954.1542757936061,
"y": 515.2247261712328
"x": 970.576876549135,
"y": 502.493937944275
},
"dragging": false
},
{
"width": 300,
"height": 461,
"id": "llmChain_0",
"position": {
"x": 1414.1175742139496,
"y": 340.4040954840462
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{openAI_0.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"promptValues": ""
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "LLMChain | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1414.1175742139496,
"y": 340.4040954840462
},
"dragging": false
}
],
"edges": [
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-llm-BaseLanguageModel",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-llm-BaseLanguageModel",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"targetHandle": "llmChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}

View File

@ -4,143 +4,10 @@
{
"width": 300,
"height": 460,
"id": "llmChain_0",
"position": {
"x": 1301.8762472836022,
"y": 772.7199253009146
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
}
],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-llm-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"llm": "{{chatOpenAI_0.data.instance}}",
"prompt": "{{chatPromptTemplate_0.data.instance}}",
"promptValues": "{\n \"input_language\": \"English\",\n \"output_language\": \"Italian\"\n}"
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1301.8762472836022,
"y": 772.7199253009146
},
"dragging": false
},
{
"width": 300,
"height": 471,
"id": "chatOpenAI_0",
"position": {
"x": 821.5000372338304,
"y": 369.8333649665954
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
}
],
"default": "gpt-3.5-turbo",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": "0"
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "BaseChatModel | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 821.5000372338304,
"y": 369.8333649665954
},
"dragging": false
},
{
"width": 300,
"height": 459,
"id": "chatPromptTemplate_0",
"position": {
"x": 821.3479428749118,
"y": 871.7203878238932
"x": 524,
"y": 237
},
"type": "customNode",
"data": {
@ -148,7 +15,7 @@
"label": "Chat Prompt Template",
"name": "chatPromptTemplate",
"type": "ChatPromptTemplate",
"baseClasses": ["BasePromptTemplate"],
"baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a chat prompt",
"inputParams": [
@ -174,18 +41,163 @@
},
"outputAnchors": [
{
"id": "chatPromptTemplate_0-output-chatPromptTemplate-BasePromptTemplate",
"id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate",
"name": "chatPromptTemplate",
"label": "ChatPromptTemplate",
"type": "BasePromptTemplate"
"type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 524,
"y": 237
}
},
{
"width": 300,
"height": 472,
"id": "chatOpenAI_0",
"position": {
"x": 855.1997276913991,
"y": 24.090553068402556
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0314",
"name": "gpt-4-0314"
},
{
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
}
],
"default": "gpt-3.5-turbo",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": 0.9
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 821.3479428749118,
"y": 871.7203878238932
"x": 855.1997276913991,
"y": 24.090553068402556
},
"dragging": false
},
{
"width": 300,
"height": 461,
"id": "llmChain_0",
"position": {
"x": 1192.2235692202612,
"y": 361.71736677076257
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}",
"prompt": "{{chatPromptTemplate_0.data.instance}}",
"promptValues": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}"
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "LLMChain | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1192.2235692202612,
"y": 361.71736677076257
},
"dragging": false
}
@ -193,22 +205,22 @@
"edges": [
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-llm-BaseLanguageModel",
"targetHandle": "llmChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel-llmChain_0-llmChain_0-input-llm-BaseLanguageModel",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "chatPromptTemplate_0",
"sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-BasePromptTemplate",
"sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}

View File

@ -23,9 +23,6 @@ export class NodesPool {
const newNodeInstance = new nodeModule.nodeClass()
newNodeInstance.filePath = file
const baseClasses = await newNodeInstance.getBaseClasses!.call(newNodeInstance)
newNodeInstance.baseClasses = baseClasses
this.componentNodes[newNodeInstance.name] = newNodeInstance
// Replace file icon with absolute path
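With this change the pool stops resolving base classes at registration time and trusts the baseClasses the instance already carries, which matches the marketplace JSON above where every node now lists its concrete class ahead of the abstract ones. One way such a chain could be derived - a sketch assuming a prototype-walking helper, not the project's actual utility - is:

// Hypothetical helper: collect constructor names up the prototype chain,
// e.g. getClassChain(OpenAI) -> ['OpenAI', 'BaseLLM', 'BaseLanguageModel'].
const getClassChain = (targetClass: { prototype: object }): string[] => {
    const chain: string[] = []
    let proto: any = targetClass.prototype
    while (proto && proto.constructor && proto.constructor.name !== 'Object') {
        chain.push(proto.constructor.name)
        proto = Object.getPrototypeOf(proto)
    }
    return chain
}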

View File

@ -107,7 +107,9 @@ export const getEndingNode = (nodeDependencies: INodeDependencies, graph: INodeD
// Find ending node
let endingNodeId = ''
Object.keys(graph).forEach((nodeId) => {
if (!graph[nodeId].length && nodeDependencies[nodeId] > 0) {
if (Object.keys(nodeDependencies).length === 1) {
endingNodeId = nodeId
} else if (!graph[nodeId].length && nodeDependencies[nodeId] > 0) {
endingNodeId = nodeId
}
})
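The extra branch covers flows that consist of a single node: with no edges, that node's dependency count is 0, so the old guard (no outgoing edges and dependencies greater than 0) never matched and no ending node was found. Illustrative values for that case:

// One-node flow, e.g. a lone LLMChain: no outgoing edges, zero dependencies.
const graph = { llmChain_0: [] as string[] }
const nodeDependencies = { llmChain_0: 0 }
// Old condition: !graph['llmChain_0'].length && nodeDependencies['llmChain_0'] > 0 -> false
// New condition: Object.keys(nodeDependencies).length === 1 -> true, so llmChain_0 is the ending node.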

View File

@ -3,15 +3,21 @@ import { Handle, Position, useUpdateNodeInternals } from 'reactflow'
import { useEffect, useRef, useState, useContext } from 'react'
// material-ui
import { useTheme } from '@mui/material/styles'
import { useTheme, styled } from '@mui/material/styles'
import { Box, Typography, Tooltip } from '@mui/material'
import { tooltipClasses } from '@mui/material/Tooltip'
import { Dropdown } from 'ui-component/dropdown/Dropdown'
import { Input } from 'ui-component/input/Input'
import { File } from 'ui-component/file/File'
import { flowContext } from 'store/context/ReactFlowContext'
import { isValidConnection } from 'utils/genericHelper'
const CustomWidthTooltip = styled(({ className, ...props }) => <Tooltip {...props} classes={{ popper: className }} />)({
[`& .${tooltipClasses.tooltip}`]: {
maxWidth: 500
}
})
// ===========================|| NodeInputHandler ||=========================== //
const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false }) => {
@ -36,14 +42,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false }) =
<div ref={ref}>
{inputAnchor && (
<>
<Tooltip
placement='left'
title={
<Typography sx={{ color: 'white', p: 1 }} variant='h5'>
{'Type: ' + inputAnchor.type}
</Typography>
}
>
<CustomWidthTooltip placement='left' title={inputAnchor.type}>
<Handle
type='target'
position={Position.Left}
@ -57,7 +56,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false }) =
top: position
}}
/>
</Tooltip>
</CustomWidthTooltip>
<Box sx={{ p: 2 }}>
<Typography>
{inputAnchor.label}
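Swapping the inline Typography tooltip for CustomWidthTooltip raises the tooltip width cap (MUI's default is 300px) to 500px, so the longer pipe-joined type chains introduced above, such as ChatOpenAI | BaseChatModel | BaseLanguageModel, fit without awkward wrapping.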

View File

@ -3,11 +3,18 @@ import { Handle, Position, useUpdateNodeInternals } from 'reactflow'
import { useEffect, useRef, useState, useContext } from 'react'
// material-ui
import { useTheme } from '@mui/material/styles'
import { useTheme, styled } from '@mui/material/styles'
import { Box, Typography, Tooltip } from '@mui/material'
import { tooltipClasses } from '@mui/material/Tooltip'
import { flowContext } from 'store/context/ReactFlowContext'
import { isValidConnection } from 'utils/genericHelper'
const CustomWidthTooltip = styled(({ className, ...props }) => <Tooltip {...props} classes={{ popper: className }} />)({
[`& .${tooltipClasses.tooltip}`]: {
maxWidth: 500
}
})
// ===========================|| NodeOutputHandler ||=========================== //
const NodeOutputHandler = ({ outputAnchor, data }) => {
@ -34,14 +41,7 @@ const NodeOutputHandler = ({ outputAnchor, data }) => {
return (
<div ref={ref}>
<Tooltip
placement='right'
title={
<Typography sx={{ color: 'white', p: 1 }} variant='h5'>
{'Type: ' + outputAnchor.type}
</Typography>
}
>
<CustomWidthTooltip placement='right' title={outputAnchor.type}>
<Handle
type='source'
position={Position.Right}
@ -55,7 +55,7 @@ const NodeOutputHandler = ({ outputAnchor, data }) => {
top: position
}}
/>
</Tooltip>
</CustomWidthTooltip>
<Box sx={{ p: 2, textAlign: 'end' }}>
<Typography>{outputAnchor.label}</Typography>
</Box>

View File

@ -1,7 +1,3 @@
.cloudform {
position: relative;
}
.messagelist {
width: 100%;
height: 100%;
@ -113,13 +109,11 @@
position: relative;
flex-direction: column;
padding: 10px;
max-width: 500px;
}
.cloud {
width: '100%';
max-width: 500px;
height: 73vh;
width: 400px;
height: calc(100vh - 260px);
border-radius: 0.5rem;
display: flex;
justify-content: center;

View File

@ -336,13 +336,13 @@ export const ChatMessage = ({ chatflowid }) => {
</div>
<Divider />
<div className='center'>
<div className='cloudform'>
<form onSubmit={handleSubmit}>
<div style={{ width: '100%' }}>
<form style={{ width: '100%' }} onSubmit={handleSubmit}>
<OutlinedInput
inputRef={inputRef}
// eslint-disable-next-line
autoFocus
sx={{ width: '50vh' }}
sx={{ width: '100%' }}
disabled={loading || !chatflowid}
onKeyDown={handleEnter}
id='userInput'