Merge branch 'FlowiseAI:main' into bug/ChatInput

This commit is contained in:
Vikram Segta 2023-07-06 10:33:05 +05:30 committed by GitHub
commit c74dec7133
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
122 changed files with 4906 additions and 1302 deletions

2
.github/FUNDING.yml vendored
View File

@ -1,6 +1,6 @@
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
github: [FlowiseAI] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username

View File

@ -1,11 +1,11 @@
<!-- markdownlint-disable MD030 -->
# Flowise - LangchainJS UI
# Flowise
<a href="https://github.com/FlowiseAI/Flowise">
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true"></a>
Drag & drop UI to build your customized LLM flow using [LangchainJS](https://github.com/hwchase17/langchainjs)
Drag & drop UI to build your customized LLM flow
## ⚡Quick Start

View File

@ -1,3 +1,7 @@
PORT=3000
# FLOWISE_USERNAME=user
# FLOWISE_PASSWORD=1234
# DEBUG=true
# DATABASE_PATH=/your_database_path/.flowise
# APIKEY_PATH=/your_api_key_path/.flowise
# EXECUTION_MODE=child or main

View File

@ -8,6 +8,10 @@ services:
- PORT=${PORT}
- FLOWISE_USERNAME=${FLOWISE_USERNAME}
- FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
- DATABASE_PATH=${DATABASE_PATH}
- APIKEY_PATH=${APIKEY_PATH}
- EXECUTION_MODE=${EXECUTION_MODE}
- DEBUG=${DEBUG}
ports:
- '${PORT}:${PORT}'
volumes:

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.2.12",
"version": "1.2.14",
"private": true,
"homepage": "https://flowiseai.com",
"workspaces": [

View File

@ -1 +0,0 @@
DEBUG=true

View File

@ -12,14 +12,6 @@ Install:
npm i flowise-components
```
## Debug
To view all the logs, create an `.env` file and add:
```
DEBUG=true
```
## License
Source code in this repository is made available under the [MIT License](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).

View File

@ -1,9 +1,10 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { AIChatMessage, HumanChatMessage } from 'langchain/schema'
class OpenAIFunctionAgent_Agents implements INode {
label: string
@ -30,30 +31,67 @@ class OpenAIFunctionAgent_Agents implements INode {
type: 'Tool',
list: true
},
{
label: 'Memory',
name: 'memory',
type: 'BaseChatMemory'
},
{
label: 'OpenAI Chat Model',
name: 'model',
description:
'Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target="_blank" href="https://platform.openai.com/docs/guides/gpt/function-calling">docs</a> for more info',
type: 'BaseChatModel'
},
{
label: 'System Message',
name: 'systemMessage',
type: 'string',
rows: 4,
optional: true,
additionalParams: true
}
]
}
async init(nodeData: INodeData): Promise<any> {
const model = nodeData.inputs?.model as BaseLanguageModel
let tools = nodeData.inputs?.tools as Tool[]
const memory = nodeData.inputs?.memory as BaseChatMemory
const systemMessage = nodeData.inputs?.systemMessage as string
let tools = nodeData.inputs?.tools
tools = flatten(tools)
const executor = await initializeAgentExecutorWithOptions(tools, model, {
agentType: 'openai-functions',
verbose: process.env.DEBUG === 'true' ? true : false
verbose: process.env.DEBUG === 'true' ? true : false,
agentArgs: {
prefix: systemMessage ?? `You are a helpful AI assistant.`
}
})
if (memory) executor.memory = memory
return executor
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const executor = nodeData.instance as AgentExecutor
const memory = nodeData.inputs?.memory as BaseChatMemory
if (options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory
for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIChatMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanChatMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
executor.memory = memory
}
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)

View File

@ -50,7 +50,7 @@ class LLMChain_Chains implements INode {
{
label: 'Output Prediction',
name: 'outputPrediction',
baseClasses: ['string']
baseClasses: ['string', 'json']
}
]
}

View File

@ -43,6 +43,10 @@ class AzureChatOpenAI_ChatModels implements INode {
{
label: 'gpt-35-turbo',
name: 'gpt-35-turbo'
},
{
label: 'gpt-35-turbo-16k',
name: 'gpt-35-turbo-16k'
}
],
default: 'gpt-35-turbo',
@ -70,14 +74,10 @@ class AzureChatOpenAI_ChatModels implements INode {
{
label: 'Azure OpenAI Api Version',
name: 'azureOpenAIApiVersion',
type: 'options',
options: [
{
label: '2023-03-15-preview',
name: '2023-03-15-preview'
}
],
default: '2023-03-15-preview'
type: 'string',
placeholder: '2023-06-01-preview',
description:
'Description of Supported API Versions. Please refer <a target="_blank" href="https://learn.microsoft.com/en-us/azure/cognitive-services/openai/reference#chat-completions">examples</a>'
},
{
label: 'Max Tokens',
@ -124,7 +124,7 @@ class AzureChatOpenAI_ChatModels implements INode {
const streaming = nodeData.inputs?.streaming as boolean
const obj: Partial<AzureOpenAIInput> & Partial<OpenAIBaseInput> = {
temperature: parseInt(temperature, 10),
temperature: parseFloat(temperature),
modelName,
azureOpenAIApiKey,
azureOpenAIApiInstanceName,

View File

@ -120,7 +120,7 @@ class ChatAnthropic_ChatModels implements INode {
const streaming = nodeData.inputs?.streaming as boolean
const obj: Partial<AnthropicInput> & { anthropicApiKey?: string } = {
temperature: parseInt(temperature, 10),
temperature: parseFloat(temperature),
modelName,
anthropicApiKey,
streaming: streaming ?? true

View File

@ -89,7 +89,7 @@ class ChatHuggingFace_ChatModels implements INode {
apiKey
}
if (temperature) obj.temperature = parseInt(temperature, 10)
if (temperature) obj.temperature = parseFloat(temperature)
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseInt(topP, 10)
if (hfTopK) obj.topK = parseInt(hfTopK, 10)

View File

@ -74,7 +74,7 @@ class ChatLocalAI_ChatModels implements INode {
const basePath = nodeData.inputs?.basePath as string
const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = {
temperature: parseInt(temperature, 10),
temperature: parseFloat(temperature),
modelName,
openAIApiKey: 'sk-'
}

View File

@ -132,7 +132,7 @@ class ChatOpenAI_ChatModels implements INode {
const basePath = nodeData.inputs?.basepath as string
const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = {
temperature: parseInt(temperature, 10),
temperature: parseFloat(temperature),
modelName,
openAIApiKey,
streaming: streaming ?? true

View File

@ -0,0 +1,82 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { GitbookLoader } from 'langchain/document_loaders/web/gitbook'
/**
 * Document Loader node that pulls pages from a GitBook site and returns them
 * as langchain Documents, optionally split and enriched with user metadata.
 */
class Gitbook_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs?: INodeParams[]

    constructor() {
        this.label = 'GitBook'
        this.name = 'gitbook'
        this.type = 'Document'
        this.icon = 'gitbook.svg'
        this.category = 'Document Loaders'
        this.description = `Load data from GitBook`
        this.baseClasses = [this.type]
        this.inputs = [
            {
                label: 'Web Path',
                name: 'webPath',
                type: 'string',
                placeholder: 'https://docs.gitbook.com/product-tour/navigation',
                description: 'If want to load all paths from the GitBook provide only root path e.g.https://docs.gitbook.com/ '
            },
            {
                label: 'Should Load All Paths',
                name: 'shouldLoadAllPaths',
                type: 'boolean',
                description: 'Load from all paths in a given GitBook',
                optional: true
            },
            {
                label: 'Text Splitter',
                name: 'textSplitter',
                type: 'TextSplitter',
                optional: true
            },
            {
                label: 'Metadata',
                name: 'metadata',
                type: 'json',
                optional: true,
                additionalParams: true
            }
        ]
    }

    /**
     * Loads the GitBook page(s) and returns the resulting documents.
     *
     * @param nodeData - node inputs: webPath, shouldLoadAllPaths, textSplitter, metadata
     * @returns array of Documents (metadata merged in when supplied)
     */
    async init(nodeData: INodeData): Promise<any> {
        const webPath = nodeData.inputs?.webPath as string
        const shouldLoadAllPaths = nodeData.inputs?.shouldLoadAllPaths as boolean
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
        const metadata = nodeData.inputs?.metadata

        const loader = shouldLoadAllPaths ? new GitbookLoader(webPath, { shouldLoadAllPaths }) : new GitbookLoader(webPath)

        // BUG FIX: previously loadAndSplit() was called with no argument, so
        // langchain fell back to its default splitter and the user-configured
        // 'Text Splitter' input was silently ignored. Pass it through.
        const docs = textSplitter ? await loader.loadAndSplit(textSplitter) : await loader.load()

        if (metadata) {
            // Metadata may arrive as a JSON string (from the UI) or an already-parsed object
            const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata)
            return docs.map((doc) => ({
                ...doc,
                metadata: {
                    ...doc.metadata,
                    ...parsedMetadata
                }
            }))
        }
        return docs
    }
}

module.exports = {
    nodeClass: Gitbook_DocumentLoaders
}

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64"><switch><g><path d="M28.8 47.4c1 0 1.9.8 1.9 1.9 0 1-.8 1.9-1.9 1.9-1 0-1.9-.8-1.9-1.9 0-1.1.9-1.9 1.9-1.9m29.4-11.6c-1 0-1.9-.8-1.9-1.9 0-1 .8-1.9 1.9-1.9 1 0 1.9.8 1.9 1.9 0 1-.9 1.9-1.9 1.9m0-7.7c-3.2 0-5.8 2.6-5.8 5.8 0 .6.1 1.2.3 1.8L33.6 45.9c-1.1-1.6-2.9-2.5-4.8-2.5-2.2 0-4.2 1.3-5.2 3.2l-17.2-9c-1.8-1-3.2-3.9-3-6.7.1-1.4.6-2.5 1.3-2.9.5-.3 1-.2 1.7.1l.1.1c4.6 2.4 19.5 10.2 20.1 10.5 1 .4 1.5.6 3.2-.2l30.8-16c.5-.2 1-.6 1-1.3 0-.9-.9-1.3-.9-1.3-1.8-.8-4.5-2.1-7.1-3.3C48 14 41.6 11 38.8 9.5c-2.4-1.3-4.4-.2-4.7 0l-.7.3C20.7 16.2 3.9 24.5 2.9 25.1c-1.7 1-2.8 3.1-2.9 5.7-.2 4.1 1.9 8.4 4.9 9.9l18.2 9.4c.4 2.8 2.9 5 5.7 5 3.2 0 5.7-2.5 5.8-5.7l20-10.8c1 .8 2.3 1.2 3.6 1.2 3.2 0 5.8-2.6 5.8-5.8 0-3.3-2.6-5.9-5.8-5.9" fill="#4285fd"/></g></switch></svg>

After

Width:  |  Height:  |  Size: 826 B

View File

@ -0,0 +1,106 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { JSONLinesLoader } from 'langchain/document_loaders/fs/json'
/**
 * Document Loader node that reads uploaded JSON Lines (.jsonl) files and
 * extracts a value from each line via a JSON pointer, yielding Documents.
 */
class Jsonlines_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Json Lines File'
        this.name = 'jsonlinesFile'
        this.type = 'Document'
        this.icon = 'jsonlines.svg'
        this.category = 'Document Loaders'
        this.description = `Load data from JSON Lines files`
        this.baseClasses = [this.type]
        this.inputs = [
            {
                label: 'Jsonlines File',
                name: 'jsonlinesFile',
                type: 'file',
                fileType: '.jsonl'
            },
            {
                label: 'Text Splitter',
                name: 'textSplitter',
                type: 'TextSplitter',
                optional: true
            },
            {
                label: 'Pointer Extraction',
                name: 'pointerName',
                type: 'string',
                placeholder: 'Enter pointer name',
                optional: false
            },
            {
                label: 'Metadata',
                name: 'metadata',
                type: 'json',
                optional: true,
                additionalParams: true
            }
        ]
    }

    /**
     * Decodes the uploaded file(s), runs the JSONLinesLoader over each, and
     * returns the combined documents (with user metadata merged in if given).
     */
    async init(nodeData: INodeData): Promise<any> {
        const splitter = nodeData.inputs?.textSplitter as TextSplitter
        const fileData = nodeData.inputs?.jsonlinesFile as string
        const pointerName = nodeData.inputs?.pointerName as string
        const metadata = nodeData.inputs?.metadata

        const pointer = '/' + pointerName.trim()

        // A multi-file upload is serialized as a JSON array of data-URI strings
        const fileList: string[] = fileData.startsWith('[') && fileData.endsWith(']') ? JSON.parse(fileData) : [fileData]

        const collected = []
        for (const file of fileList) {
            const segments = file.split(',')
            // Discard the trailing comma-separated segment; the base64 payload
            // is the segment immediately before it.
            segments.pop()
            const payload = Buffer.from(segments.pop() || '', 'base64')
            const loader = new JSONLinesLoader(new Blob([payload]), pointer)
            const docs = splitter ? await loader.loadAndSplit(splitter) : await loader.load()
            collected.push(...docs)
        }

        if (!metadata) return collected

        // Metadata may be a JSON string (from the UI) or an already-parsed object
        const extraMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata)
        return collected.map((doc) => ({
            ...doc,
            metadata: {
                ...doc.metadata,
                ...extraMetadata
            }
        }))
    }
}

module.exports = { nodeClass: Jsonlines_DocumentLoaders }

View File

@ -0,0 +1,16 @@
<svg width="24" height="24" xmlns="http://www.w3.org/2000/svg">
<!-- Created with Method Draw - http://github.com/duopixel/Method-Draw/ -->
<g>
<title>background</title>
<rect fill="none" id="canvas_background" height="26" width="26" y="-1" x="-1"/>
<g display="none" overflow="visible" y="0" x="0" height="100%" width="100%" id="canvasGrid">
<rect fill="url(#gridpattern)" stroke-width="0" y="0" x="0" height="100%" width="100%"/>
</g>
</g>
<g>
<title>Layer 1</title>
<text font-weight="bold" stroke="#000" transform="matrix(8.682896011956823,0,0,10.412942243751806,-30.866304860177404,-63.784276261342) " xml:space="preserve" text-anchor="start" font-family="Helvetica, Arial, sans-serif" font-size="1" id="svg_2" y="7.062874" x="3.579384" stroke-opacity="null" stroke-width="0" fill="#000000">JSON</text>
<text font-weight="bold" stroke="#000" transform="matrix(9.059566511875573,0,0,9.893934811310315,-1.3962337706973242,-106.08964247698567) " xml:space="preserve" text-anchor="start" font-family="Helvetica, Arial, sans-serif" font-size="1" id="svg_3" y="12.90427" x="0.172236" stroke-opacity="null" stroke-width="0" fill="#000000">Lines</text>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@ -43,7 +43,7 @@ class AzureOpenAIEmbedding_Embeddings implements INode {
label: 'Azure OpenAI Api Version',
name: 'azureOpenAIApiVersion',
type: 'string',
placeholder: 'YOUR-API-VERSION',
placeholder: '2023-03-15-preview',
description:
'Description of Supported API Versions. Please refer <a target="_blank" href="https://learn.microsoft.com/en-us/azure/cognitive-services/openai/reference#embeddings">examples</a>'
},

View File

@ -105,18 +105,10 @@ class AzureOpenAI_LLMs implements INode {
{
label: 'Azure OpenAI Api Version',
name: 'azureOpenAIApiVersion',
type: 'options',
options: [
{
label: '2023-03-15-preview',
name: '2023-03-15-preview'
},
{
label: '2022-12-01',
name: '2022-12-01'
}
],
default: '2023-03-15-preview'
type: 'string',
placeholder: '2023-06-01-preview',
description:
'Description of Supported API Versions. Please refer <a target="_blank" href="https://learn.microsoft.com/en-us/azure/cognitive-services/openai/reference#completions">examples</a>'
},
{
label: 'Max Tokens',
@ -179,7 +171,7 @@ class AzureOpenAI_LLMs implements INode {
const streaming = nodeData.inputs?.streaming as boolean
const obj: Partial<AzureOpenAIInput> & Partial<OpenAIInput> = {
temperature: parseInt(temperature, 10),
temperature: parseFloat(temperature),
modelName,
azureOpenAIApiKey,
azureOpenAIApiInstanceName,

View File

@ -87,7 +87,7 @@ class Cohere_LLMs implements INode {
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (modelName) obj.model = modelName
if (temperature) obj.temperature = parseInt(temperature, 10)
if (temperature) obj.temperature = parseFloat(temperature)
const model = new Cohere(obj)
return model

View File

@ -89,7 +89,7 @@ class HuggingFaceInference_LLMs implements INode {
apiKey
}
if (temperature) obj.temperature = parseInt(temperature, 10)
if (temperature) obj.temperature = parseFloat(temperature)
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseInt(topP, 10)
if (hfTopK) obj.topK = parseInt(hfTopK, 10)

View File

@ -132,7 +132,7 @@ class OpenAI_LLMs implements INode {
const basePath = nodeData.inputs?.basepath as string
const obj: Partial<OpenAIInput> & { openAIApiKey?: string } = {
temperature: parseInt(temperature, 10),
temperature: parseFloat(temperature),
modelName,
openAIApiKey,
streaming: streaming ?? true

View File

@ -0,0 +1,100 @@
import { ICommonObject, INode, INodeData, INodeParams, getBaseClasses } from '../../../src'
import { DynamoDBChatMessageHistory } from 'langchain/stores/message/dynamodb'
import { BufferMemory } from 'langchain/memory'
/**
 * Memory node that persists conversation history to a DynamoDB table,
 * exposed to chains as a langchain BufferMemory.
 */
class DynamoDb_Memory implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'DynamoDB Memory'
        this.name = 'DynamoDbMemory'
        // BUG FIX: `this.type` was never assigned, so `baseClasses` began with
        // `undefined`. Following the sibling memory nodes, type mirrors the node
        // name. NOTE(review): confirm this exact type string against the UI's
        // edge-matching expectations.
        this.type = 'DynamoDbMemory'
        this.icon = 'dynamodb.svg'
        this.category = 'Memory'
        this.description = 'Stores the conversation in dynamo db table'
        this.baseClasses = [this.type, ...getBaseClasses(BufferMemory)]
        this.inputs = [
            {
                label: 'Table Name',
                name: 'tableName',
                type: 'string'
            },
            {
                label: 'Partition Key',
                name: 'partitionKey',
                type: 'string'
            },
            {
                label: 'Session ID',
                name: 'sessionId',
                type: 'string',
                description: 'if empty, chatId will be used automatically',
                default: '',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Region',
                name: 'region',
                type: 'string',
                description: 'The aws region in which table is located',
                placeholder: 'us-east-1'
            },
            {
                label: 'Access Key',
                name: 'accessKey',
                type: 'password'
            },
            {
                label: 'Secret Access Key',
                name: 'secretAccessKey',
                type: 'password'
            },
            {
                label: 'Memory Key',
                name: 'memoryKey',
                type: 'string',
                default: 'chat_history'
            }
        ]
    }

    /**
     * Builds a BufferMemory backed by DynamoDBChatMessageHistory.
     * Falls back to the current chatId when no explicit Session ID is set.
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const tableName = nodeData.inputs?.tableName as string
        const partitionKey = nodeData.inputs?.partitionKey as string
        const sessionId = nodeData.inputs?.sessionId as string
        const region = nodeData.inputs?.region as string
        const accessKey = nodeData.inputs?.accessKey as string
        const secretAccessKey = nodeData.inputs?.secretAccessKey as string
        const memoryKey = nodeData.inputs?.memoryKey as string
        const chatId = options.chatId

        const dynamoDb = new DynamoDBChatMessageHistory({
            tableName,
            partitionKey,
            // Empty/omitted Session ID falls back to the chat's own id
            sessionId: sessionId ? sessionId : chatId,
            config: {
                region,
                credentials: {
                    accessKeyId: accessKey,
                    secretAccessKey
                }
            }
        })
        const memory = new BufferMemory({
            memoryKey,
            chatHistory: dynamoDb,
            returnMessages: true
        })
        return memory
    }
}

module.exports = { nodeClass: DynamoDb_Memory }

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="24px" height="24px" viewBox="0 0 24 24" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<!-- Generator: Sketch 64 (93537) - https://sketch.com -->
<title>Icon-Architecture/16/Arch_Amazon-DynamoDB_16</title>
<desc>Created with Sketch.</desc>
<defs>
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
<stop stop-color="#2E27AD" offset="0%"></stop>
<stop stop-color="#527FFF" offset="100%"></stop>
</linearGradient>
</defs>
<g id="Icon-Architecture/16/Arch_Amazon-DynamoDB_16" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="Icon-Architecture-BG/16/Database" fill="url(#linearGradient-1)">
<rect id="Rectangle" x="0" y="0" width="24" height="24"></rect>
</g>
<path d="M14.3871979,13.0634319 L15.4218955,9.61738691 C15.468467,9.46474602 15.4391067,9.29896386 15.3439388,9.17058378 C15.2487709,9.04220369 15.0979197,8.96739955 14.9379567,8.96739955 L14.2383715,8.96739955 L15.2507958,6.94566591 L17.7798316,6.94566591 L16.9881159,9.313116 C16.9374946,9.46676775 16.9628052,9.63659338 17.0589856,9.76800607 C17.153141,9.90042962 17.3060171,9.97826636 17.4690174,9.97826636 L18.095708,9.97826636 L14.3871979,13.0634319 Z M19.9697053,9.29997473 C19.8968108,9.10083397 19.7074875,8.96739955 19.4938659,8.96739955 L18.1706274,8.96739955 L18.9623432,6.59994946 C19.0129644,6.4462977 18.9876538,6.27647207 18.8914735,6.14404852 C18.7963056,6.01263584 18.644442,5.93479909 18.4814417,5.93479909 L14.9379567,5.93479909 C14.7455961,5.93479909 14.5714591,6.04296184 14.485403,6.21379833 L12.9667666,9.24639879 C12.88881,9.40308314 12.8958969,9.58908264 12.9880275,9.73768006 C13.0811706,9.88728835 13.2441709,9.97826636 13.4193203,9.97826636 L14.2576076,9.97826636 L12.9343691,14.3816022 C12.8705863,14.595906 12.9536051,14.8253728 13.1409036,14.9486985 C13.2259472,15.0042962 13.3221275,15.0326005 13.4193203,15.0326005 C13.5347366,15.0326005 13.6491406,14.9931766 13.743296,14.9153399 L19.8178417,9.86100581 C19.980842,9.72453879 20.0425999,9.50113723 19.9697053,9.29997473 L19.9697053,9.29997473 Z M14.8346894,17.6285064 C14.8346894,18.0904726 13.2775809,18.9891332 10.4235568,18.9891332 C7.56953281,18.9891332 6.01242428,18.0904726 6.01242428,17.6285064 L6.01242428,16.562042 C7.04914673,17.1786707 8.74293255,17.495072 10.4235568,17.495072 C12.1041811,17.495072 13.797967,17.1786707 14.8346894,16.562042 L14.8346894,17.6285064 Z M14.8346894,15.1235785 C14.8346894,15.5855446 13.2775809,16.4842052 10.4235568,16.4842052 C7.56953281,16.4842052 6.01242428,15.5855446 6.01242428,15.1235785 C6.01242428,15.0275461 6.08633125,14.9133182 6.21187186,14.7950468 C7.21214704,15.316654 8.74698225,15.6239575 10.4235568,15.6239575 C10.4438053,15.6239575 11.9948393,15.5916098 
11.9948393,15.5916098 L11.9948393,14.580743 C11.9745908,14.580743 10.4235568,14.6130907 10.4235568,14.6130907 C8.77128043,14.6130907 7.24656947,14.2886025 6.44574187,13.7680061 C6.17542458,13.5900935 6.0134367,13.3980288 6.01242428,13.252464 L6.01242428,12.1859995 C7.04914673,12.8026283 8.74293255,13.1200404 10.4235568,13.1200404 C10.6898244,13.1200404 11.8348763,13.0391711 12.1922621,13.0138994 L12.213523,12.5054334 L12.1203799,12.0050543 C11.7761557,12.0293151 10.6786878,12.1091736 10.4235568,12.1091736 C7.56953281,12.1091736 6.01242428,11.2095021 6.01242428,10.747536 C6.01242428,10.6474602 6.09139337,10.5281779 6.22503337,10.405863 C7.01877401,10.7566338 8.57183285,11.1508719 12.3178027,11.1963609 L12.3299518,10.1854941 C9.27951741,10.1491029 7.3437622,9.88223402 6.44574187,9.39095274 C6.17542458,9.21304018 6.0134367,9.02097549 6.01242428,8.87541066 L6.01242428,7.80995704 C7.04914673,8.4265858 8.74293255,8.74298711 10.4235568,8.74298711 C10.5015135,8.74298711 12.480803,8.70659591 12.5587596,8.70356331 L12.5152254,7.69370735 C12.4079084,7.69775082 10.5187247,7.73212029 10.4235568,7.73212029 C7.56953281,7.73212029 6.01242428,6.83345969 6.01242428,6.37149356 C6.01242428,5.90952742 7.56953281,5.01086682 10.4235568,5.01086682 C11.7447705,5.01086682 13.0001766,5.21809452 13.8668118,5.5809957 L14.25862,4.64796563 C13.2573324,4.23047763 11.8956217,4 10.4235568,4 C7.72949585,4 5.00101242,4.81374779 5,6.36947182 L5,8.88147587 C5,8.88147587 5.09213061,9.46070255 5.40092001,9.80742987 C5.08808091,10.1551681 5,10.4938084 5,10.7465251 L5,13.2625727 C5.00101242,13.510235 5.09213061,13.8438211 5.39788274,14.1875158 C5.08808091,14.5342431 5,14.8718726 5,15.1235785 L5,17.6335608 C5.00506212,19.1872631 7.7315207,20 10.4235568,20 C13.1186303,20 15.8471137,19.1862522 15.8471137,17.6305282 L15.8471137,15.1235785 L14.8346894,15.1235785 Z" id="Amazon-DynamoDB_Icon_16_Squid" fill="#FFFFFF"></path>
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.7 KiB

View File

@ -0,0 +1,86 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ICommonObject } from '../../../src'
import { BufferMemory } from 'langchain/memory'
import { RedisChatMessageHistory, RedisChatMessageHistoryInput } from 'langchain/stores/message/redis'
import { createClient } from 'redis'
/**
 * Memory node that persists conversation history to a Redis server,
 * exposed to chains as a langchain BufferMemory.
 */
class RedisBackedChatMemory_Memory implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Redis-Backed Chat Memory'
        this.name = 'RedisBackedChatMemory'
        this.type = 'RedisBackedChatMemory'
        this.icon = 'redis.svg'
        this.category = 'Memory'
        // BUG FIX: the description claimed the conversation is summarized, but
        // this node constructs a plain BufferMemory — nothing here summarizes.
        this.description = 'Stores the conversation in Redis server'
        this.baseClasses = [this.type, ...getBaseClasses(BufferMemory)]
        this.inputs = [
            {
                label: 'Base URL',
                name: 'baseURL',
                type: 'string',
                default: 'redis://localhost:6379'
            },
            {
                label: 'Session Id',
                name: 'sessionId',
                type: 'string',
                description: 'if empty, chatId will be used automatically',
                default: '',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Session Timeouts',
                name: 'sessionTTL',
                type: 'number',
                description: 'Omit this parameter to make sessions never expire',
                optional: true
            },
            {
                label: 'Memory Key',
                name: 'memoryKey',
                type: 'string',
                default: 'chat_history'
            }
        ]
    }

    /**
     * Builds a BufferMemory backed by RedisChatMessageHistory.
     * Falls back to the current chatId when no explicit Session Id is set.
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const baseURL = nodeData.inputs?.baseURL as string
        const sessionId = nodeData.inputs?.sessionId as string
        const sessionTTL = nodeData.inputs?.sessionTTL as number
        const memoryKey = nodeData.inputs?.memoryKey as string
        const chatId = options?.chatId as string

        const redisClient = createClient({ url: baseURL })
        // sessionTTL is only included when set, so sessions never expire by default
        const historyInput: RedisChatMessageHistoryInput = {
            sessionId: sessionId ? sessionId : chatId,
            client: redisClient,
            ...(sessionTTL ? { sessionTTL } : {})
        }
        const redisChatMessageHistory = new RedisChatMessageHistory(historyInput)
        return new BufferMemory({ memoryKey, chatHistory: redisChatMessageHistory, returnMessages: true })
    }
}

module.exports = { nodeClass: RedisBackedChatMemory_Memory }

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128" id="redis"><path fill="#A41E11" d="M121.8 93.1c-6.7 3.5-41.4 17.7-48.8 21.6-7.4 3.9-11.5 3.8-17.3 1s-42.7-17.6-49.4-20.8c-3.3-1.6-5-2.9-5-4.2v-12.7s48-10.5 55.8-13.2c7.8-2.8 10.4-2.9 17-.5s46.1 9.5 52.6 11.9v12.5c0 1.3-1.5 2.7-4.9 4.4z"></path><path fill="#D82C20" d="M121.8 80.5c-6.7 3.5-41.4 17.7-48.8 21.6-7.4 3.9-11.5 3.8-17.3 1-5.8-2.8-42.7-17.7-49.4-20.9-6.6-3.2-6.8-5.4-.3-7.9 6.5-2.6 43.2-17 51-19.7 7.8-2.8 10.4-2.9 17-.5s41.1 16.1 47.6 18.5c6.7 2.4 6.9 4.4.2 7.9z"></path><path fill="#A41E11" d="M121.8 72.5c-6.7 3.5-41.4 17.7-48.8 21.6-7.4 3.8-11.5 3.8-17.3 1-5.8-2.8-42.7-17.7-49.4-20.9-3.3-1.6-5-2.9-5-4.2v-12.7s48-10.5 55.8-13.2c7.8-2.8 10.4-2.9 17-.5s46.1 9.5 52.6 11.9v12.5c0 1.3-1.5 2.7-4.9 4.5z"></path><path fill="#D82C20" d="M121.8 59.8c-6.7 3.5-41.4 17.7-48.8 21.6-7.4 3.8-11.5 3.8-17.3 1-5.8-2.8-42.7-17.7-49.4-20.9s-6.8-5.4-.3-7.9c6.5-2.6 43.2-17 51-19.7 7.8-2.8 10.4-2.9 17-.5s41.1 16.1 47.6 18.5c6.7 2.4 6.9 4.4.2 7.9z"></path><path fill="#A41E11" d="M121.8 51c-6.7 3.5-41.4 17.7-48.8 21.6-7.4 3.8-11.5 3.8-17.3 1-5.8-2.7-42.7-17.6-49.4-20.8-3.3-1.6-5.1-2.9-5.1-4.2v-12.7s48-10.5 55.8-13.2c7.8-2.8 10.4-2.9 17-.5s46.1 9.5 52.6 11.9v12.5c.1 1.3-1.4 2.6-4.8 4.4z"></path><path fill="#D82C20" d="M121.8 38.3c-6.7 3.5-41.4 17.7-48.8 21.6-7.4 3.8-11.5 3.8-17.3 1s-42.7-17.6-49.4-20.8-6.8-5.4-.3-7.9c6.5-2.6 43.2-17 51-19.7 7.8-2.8 10.4-2.9 17-.5s41.1 16.1 47.6 18.5c6.7 2.4 6.9 4.4.2 7.8z"></path><path fill="#fff" d="M80.4 26.1l-10.8 1.2-2.5 5.8-3.9-6.5-12.5-1.1 9.3-3.4-2.8-5.2 8.8 3.4 8.2-2.7-2.2 5.4zM66.5 54.5l-20.3-8.4 29.1-4.4z"></path><ellipse cx="38.4" cy="35.4" fill="#fff" rx="15.5" ry="6"></ellipse><path fill="#7A0C00" d="M93.3 27.7l17.2 6.8-17.2 6.8z"></path><path fill="#AD2115" d="M74.3 35.3l19-7.6v13.6l-1.9.8z"></path></svg>

After

Width:  |  Height:  |  Size: 1.8 KiB

View File

@ -41,7 +41,8 @@ class ZepMemory_Memory implements INode {
type: 'string',
description: 'if empty, chatId will be used automatically',
default: '',
additionalParams: true
additionalParams: true,
optional: true
},
{
label: 'Auto Summary Template',

View File

@ -38,12 +38,7 @@ class ChatPromptTemplate_Prompts implements INode {
{
label: 'Format Prompt Values',
name: 'promptValues',
type: 'string',
rows: 4,
placeholder: `{
"input_language": "English",
"output_language": "French"
}`,
type: 'json',
optional: true,
acceptVariable: true,
list: true

View File

@ -1,5 +1,5 @@
import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
import { getBaseClasses, getInputVariables, returnJSONStr } from '../../../src/utils'
import { getBaseClasses, getInputVariables } from '../../../src/utils'
import { PromptTemplateInput } from 'langchain/prompts'
class PromptTemplate_Prompts implements INode {
@ -31,12 +31,7 @@ class PromptTemplate_Prompts implements INode {
{
label: 'Format Prompt Values',
name: 'promptValues',
type: 'string',
rows: 4,
placeholder: `{
"input_language": "English",
"output_language": "French"
}`,
type: 'json',
optional: true,
acceptVariable: true,
list: true
@ -46,12 +41,11 @@ class PromptTemplate_Prompts implements INode {
async init(nodeData: INodeData): Promise<any> {
const template = nodeData.inputs?.template as string
let promptValuesStr = nodeData.inputs?.promptValues as string
const promptValuesStr = nodeData.inputs?.promptValues as string
let promptValues: ICommonObject = {}
if (promptValuesStr) {
promptValuesStr = promptValuesStr.replace(/\s/g, '')
promptValues = JSON.parse(returnJSONStr(promptValuesStr))
promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))
}
const inputVariables = getInputVariables(template)

View File

@ -1,4 +1,8 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-tool" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-subtask" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M7 10h3v-3l-3.5 -3.5a6 6 0 0 1 8 8l6 6a2 2 0 0 1 -3 3l-6 -6a6 6 0 0 1 -8 -8l3.5 3.5"></path>
<path d="M6 9l6 0"></path>
<path d="M4 5l4 0"></path>
<path d="M6 5v11a1 1 0 0 0 1 1h5"></path>
<path d="M12 7m0 1a1 1 0 0 1 1 -1h6a1 1 0 0 1 1 1v2a1 1 0 0 1 -1 1h-6a1 1 0 0 1 -1 -1z"></path>
<path d="M12 15m0 1a1 1 0 0 1 1 -1h6a1 1 0 0 1 1 1v2a1 1 0 0 1 -1 1h-6a1 1 0 0 1 -1 -1z"></path>
</svg>

Before

Width:  |  Height:  |  Size: 396 B

After

Width:  |  Height:  |  Size: 598 B

View File

@ -0,0 +1,108 @@
import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { DynamicStructuredTool } from './core'
import { z } from 'zod'
import { DataSource } from 'typeorm'
/**
 * Tool node that wraps a user-defined Custom Tool (stored in the database)
 * as a langchain DynamicStructuredTool so it can be used inside a chatflow.
 */
class CustomTool_Tools implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Custom Tool'
        this.name = 'customTool'
        this.type = 'CustomTool'
        this.icon = 'customtool.svg'
        this.category = 'Tools'
        this.description = `Use custom tool you've created in Flowise within chatflow`
        this.inputs = [
            {
                label: 'Select Tool',
                name: 'selectedTool',
                type: 'asyncOptions',
                loadMethod: 'listTools'
            }
        ]
        this.baseClasses = [this.type, 'Tool', ...getBaseClasses(DynamicStructuredTool)]
    }

    //@ts-ignore
    loadMethods = {
        /** Lists all Tool records from the database as dropdown options for the UI. */
        async listTools(nodeData: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
            const returnData: INodeOptionsValue[] = []
            const appDataSource = options.appDataSource as DataSource
            const databaseEntities = options.databaseEntities as IDatabaseEntity

            // Without a data source there is nothing to list — return empty, don't throw
            if (appDataSource === undefined || !appDataSource) {
                return returnData
            }

            const tools = await appDataSource.getRepository(databaseEntities['Tool']).find()
            for (let i = 0; i < tools.length; i += 1) {
                const data = {
                    label: tools[i].name,
                    name: tools[i].id,
                    description: tools[i].description
                } as INodeOptionsValue
                returnData.push(data)
            }
            return returnData
        }
    }

    /**
     * Looks up the selected Tool record and wraps it in a DynamicStructuredTool.
     *
     * @throws Error when the tool id cannot be found or the lookup fails
     */
    async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
        const selectedToolId = nodeData.inputs?.selectedTool as string
        const appDataSource = options.appDataSource as DataSource
        const databaseEntities = options.databaseEntities as IDatabaseEntity

        try {
            const tool = await appDataSource.getRepository(databaseEntities['Tool']).findOneBy({
                id: selectedToolId
            })
            if (!tool) throw new Error(`Tool ${selectedToolId} not found`)
            const obj = {
                name: tool.name,
                description: tool.description,
                schema: z.object(convertSchemaToZod(tool.schema)),
                code: tool.func
            }
            return new DynamicStructuredTool(obj)
        } catch (e) {
            // BUG FIX: `new Error(e)` passed an unknown value where a string
            // message is expected (a strict-mode type error) and produced
            // "Error: Error: …" double-wrapped messages. Extract the message.
            throw new Error(e instanceof Error ? e.message : String(e))
        }
    }
}
/**
 * Converts a JSON schema-definition string into a map of zod validators.
 *
 * The input is expected to be a JSON array of entries shaped like
 * `{ property, type, description, required }` (assumed from usage here —
 * TODO confirm against the Tool entity's schema editor).
 *
 * @param schema JSON string describing the tool's input properties.
 * @returns An object mapping each property name to its zod validator,
 *          suitable for `z.object(...)`.
 * @throws Error when the string is not valid JSON.
 */
const convertSchemaToZod = (schema: string) => {
    try {
        const parsedSchema = JSON.parse(schema)
        const zodObj: any = {}
        for (const sch of parsedSchema) {
            // BUGFIX: the required variant was previously constructed and then
            // discarded — the optional validator was always assigned, so the
            // `required` flag had no effect. Assign the required variant instead.
            if (sch.type === 'string') {
                zodObj[sch.property] = sch.required
                    ? z.string({ required_error: `${sch.property} required` }).describe(sch.description)
                    : z.string().describe(sch.description)
            } else if (sch.type === 'number') {
                zodObj[sch.property] = sch.required
                    ? z.number({ required_error: `${sch.property} required` }).describe(sch.description)
                    : z.number().describe(sch.description)
            } else if (sch.type === 'boolean') {
                zodObj[sch.property] = sch.required
                    ? z.boolean({ required_error: `${sch.property} required` }).describe(sch.description)
                    : z.boolean().describe(sch.description)
            }
        }
        return zodObj
    } catch (e) {
        throw new Error(e)
    }
}

module.exports = { nodeClass: CustomTool_Tools }

View File

@ -0,0 +1,78 @@
import { z } from 'zod'
import { CallbackManagerForToolRun } from 'langchain/callbacks'
import { StructuredTool, ToolParams } from 'langchain/tools'
import { NodeVM } from 'vm2'
import { availableDependencies } from '../../../src/utils'
/**
 * Shared constructor fields for dynamic tools: identifying metadata plus the
 * JavaScript source (`code`) that is executed when the tool is invoked.
 */
export interface BaseDynamicToolInput extends ToolParams {
    name: string
    description: string
    // JavaScript function body run inside the vm2 sandbox on each call
    code: string
    // When true, the tool's output is returned directly to the user
    returnDirect?: boolean
}
/**
 * Constructor fields for {@link DynamicStructuredTool}: adds a zod `schema`
 * describing the structured input and an optional `func` override.
 */
export interface DynamicStructuredToolInput<
    // eslint-disable-next-line
    T extends z.ZodObject<any, any, any, any> = z.ZodObject<any, any, any, any>
> extends BaseDynamicToolInput {
    func?: (input: z.infer<T>, runManager?: CallbackManagerForToolRun) => Promise<string>
    schema: T
}
/**
 * A LangChain structured tool whose implementation is user-supplied JavaScript
 * executed inside a vm2 NodeVM sandbox. Each validated argument is exposed to
 * the sandboxed code as a `$`-prefixed variable (e.g. `$query`).
 */
export class DynamicStructuredTool<
    // eslint-disable-next-line
    T extends z.ZodObject<any, any, any, any> = z.ZodObject<any, any, any, any>
> extends StructuredTool {
    name: string
    description: string
    // JavaScript function body run in the sandbox on each call
    code: string
    func: DynamicStructuredToolInput['func']
    schema: T

    constructor(fields: DynamicStructuredToolInput<T>) {
        super(fields)
        this.name = fields.name
        this.description = fields.description
        this.code = fields.code
        this.func = fields.func
        this.returnDirect = fields.returnDirect ?? this.returnDirect
        this.schema = fields.schema
    }

    /**
     * Runs `this.code` in a vm2 sandbox and returns its result.
     *
     * @param arg Parsed arguments validated against `this.schema`.
     * @returns Whatever the user code's async function resolves to.
     */
    protected async _call(arg: z.output<T>): Promise<string> {
        // Expose each argument to the sandboxed code as $<property>.
        const sandbox: any = {}
        if (typeof arg === 'object' && Object.keys(arg).length) {
            for (const item in arg) {
                sandbox[`$${item}`] = arg[item]
            }
        }

        const options = {
            console: 'inherit',
            sandbox,
            require: {
                external: false as boolean | { modules: string[] },
                builtin: ['*']
            }
        } as any

        // FIX: the previous JSON.stringify/JSON.parse round-trip of this
        // constant array was a no-op; whitelist the dependencies directly.
        if (availableDependencies && availableDependencies.length) {
            options.require.external = {
                modules: [...availableDependencies]
            }
        }

        const vm = new NodeVM(options)
        const response = await vm.run(`module.exports = async function() {${this.code}}()`, __dirname)

        return response
    }
}

View File

@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-tool" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M7 10h3v-3l-3.5 -3.5a6 6 0 0 1 8 8l6 6a2 2 0 0 1 -3 3l-6 -6a6 6 0 0 1 -8 -8l3.5 3.5"></path>
</svg>

After

Width:  |  Height:  |  Size: 396 B

View File

@ -0,0 +1,120 @@
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { QdrantClient } from '@qdrant/js-client-rest'
import { QdrantVectorStore, QdrantLibArgs } from 'langchain/vectorstores/qdrant'
import { Embeddings } from 'langchain/embeddings/base'
import { getBaseClasses } from '../../../src/utils'
/**
 * Flowise node that loads an existing Qdrant collection (documents already
 * upserted) and exposes it as a vector store or retriever.
 */
class Qdrant_Existing_VectorStores implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    outputs: INodeOutputsValue[]

    constructor() {
        this.label = 'Qdrant Load Existing Index'
        this.name = 'qdrantExistingIndex'
        this.type = 'Qdrant'
        this.icon = 'qdrant_logo.svg'
        this.category = 'Vector Stores'
        this.description = 'Load existing index from Qdrant (i.e., documents have been upserted)'
        this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
        this.inputs = [
            {
                label: 'Embeddings',
                name: 'embeddings',
                type: 'Embeddings'
            },
            {
                label: 'Qdrant Server URL',
                name: 'qdrantServerUrl',
                type: 'string',
                placeholder: 'http://localhost:6333'
            },
            {
                label: 'Qdrant Collection Name',
                name: 'qdrantCollection',
                type: 'string'
            },
            {
                label: 'Qdrant API Key',
                name: 'qdrantApiKey',
                type: 'password',
                optional: true
            },
            {
                // FIX: corrected user-facing label typo ("Cofiguration").
                // The internal `name` key is kept unchanged so existing
                // saved chatflows keep resolving this input.
                label: 'Qdrant Collection Configuration',
                name: 'qdrantCollectionCofiguration',
                type: 'json',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top K',
                name: 'topK',
                description: 'Number of top results to fetch. Default to 4',
                placeholder: '4',
                type: 'number',
                additionalParams: true,
                optional: true
            }
        ]
        this.outputs = [
            {
                label: 'Qdrant Retriever',
                name: 'retriever',
                baseClasses: this.baseClasses
            },
            {
                label: 'Qdrant Vector Store',
                name: 'vectorStore',
                baseClasses: [this.type, ...getBaseClasses(QdrantVectorStore)]
            }
        ]
    }

    /**
     * Connects to the Qdrant server and wraps the existing collection.
     *
     * @returns A retriever or vector store depending on the selected output.
     */
    async init(nodeData: INodeData): Promise<any> {
        const qdrantServerUrl = nodeData.inputs?.qdrantServerUrl as string
        const collectionName = nodeData.inputs?.qdrantCollection as string
        const qdrantApiKey = nodeData.inputs?.qdrantApiKey as string
        let qdrantCollectionCofiguration = nodeData.inputs?.qdrantCollectionCofiguration
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const output = nodeData.outputs?.output as string
        const topK = nodeData.inputs?.topK as string
        const k = topK ? parseInt(topK, 10) : 4

        // connect to Qdrant Cloud
        const client = new QdrantClient({
            url: qdrantServerUrl,
            apiKey: qdrantApiKey
        })

        const dbConfig: QdrantLibArgs = {
            client,
            collectionName
        }

        if (qdrantCollectionCofiguration) {
            // The UI may deliver the configuration as a JSON string; parse it.
            qdrantCollectionCofiguration =
                typeof qdrantCollectionCofiguration === 'object' ? qdrantCollectionCofiguration : JSON.parse(qdrantCollectionCofiguration)
            dbConfig.collectionConfig = qdrantCollectionCofiguration
        }

        const vectorStore = await QdrantVectorStore.fromExistingCollection(embeddings, dbConfig)

        if (output === 'retriever') {
            const retriever = vectorStore.asRetriever(k)
            return retriever
        } else if (output === 'vectorStore') {
            ;(vectorStore as any).k = k
            return vectorStore
        }
        return vectorStore
    }
}

module.exports = { nodeClass: Qdrant_Existing_VectorStores }

View File

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="451.84" height="152.4" fill="none" version="1.1" viewBox="0 0 451.84 152.4" xmlns="http://www.w3.org/2000/svg">
<g fill="#dc244c">
<path d="m212.69 116c0 5.523-4.477 10.001-10 10.001h-6.836v-7.808h-0.244c-0.732 1.057-1.708 2.155-2.928 3.293-1.139 1.058-2.521 2.034-4.148 2.929-1.545 0.894-3.294 1.626-5.246 2.196-1.871 0.569-3.823 0.853-5.856 0.853-4.392 0-8.377-0.732-11.956-2.196-3.579-1.545-6.669-3.66-9.272-6.344-2.521-2.765-4.473-6.018-5.856-9.759-1.383-3.742-2.074-7.849-2.074-12.322 0-4.148 0.61-8.093 1.83-11.835 1.301-3.822 3.091-7.198 5.368-10.126 2.359-2.9275 5.205-5.2455 8.54-6.9535 3.335-1.7893 7.117-2.684 11.346-2.684 3.823 0 7.361 0.61 10.614 1.83 3.335 1.1387 6.059 3.1313 8.174 5.9785h0.244v-29.284c0-5.5229 4.477-10 10-10h8.3zm-16.836-19.646c0-4.473-1.301-8.092-3.904-10.858-2.521-2.765-6.1-4.148-10.736-4.148s-8.255 1.383-10.858 4.148c-2.521 2.766-3.782 6.385-3.782 10.858 0 4.474 1.261 8.093 3.782 10.858 2.603 2.766 6.222 4.149 10.858 4.149s8.215-1.383 10.736-4.149c2.603-2.765 3.904-6.384 3.904-10.858z"/>
<path d="m224.53 76.708c0-5.5223 4.477-9.9995 10-9.9995h8.3v9.5155h0.244c1.952-3.6595 4.27-6.3842 6.954-8.1735 2.684-1.8707 6.059-2.806 10.126-2.806 1.057 0 2.114 0.0407 3.172 0.122 1.057 0.0813 2.033 0.244 2.928 0.488v16.714c-1.302-0.407-2.603-0.692-3.904-0.855-1.22-0.244-2.522-0.366-3.904-0.366-3.498 0-6.263 0.488-8.296 1.464-2.034 0.976-3.62 2.359-4.758 4.148-1.058 1.708-1.749 3.782-2.074 6.222-0.326 2.441-0.488 5.124-0.488 8.052v14.766c0 5.523-4.477 10.001-10 10.001h-8.3z"/>
<path d="m310.64 118.56h-0.244c-2.033 3.172-4.758 5.449-8.174 6.832-3.334 1.382-6.872 2.073-10.614 2.073-2.765 0-5.449-0.406-8.052-1.219-2.521-0.732-4.758-1.871-6.71-3.416-1.952-1.546-3.497-3.457-4.636-5.735-1.138-2.277-1.708-4.92-1.708-7.929 0-3.416 0.61-6.304 1.83-8.662 1.302-2.359 3.01-4.311 5.124-5.856 2.196-1.546 4.677-2.725 7.442-3.538 2.766-0.895 5.612-1.546 8.54-1.953 3.01-0.406 5.978-0.65 8.906-0.732 3.01-0.081 5.775-0.121 8.296-0.121 0-3.254-1.179-5.816-3.538-7.687-2.277-1.952-5.002-2.928-8.174-2.928-3.009 0-5.774 0.651-8.296 1.952-2.44 1.221-4.636 2.929-6.588 5.124l-9.76-10.004c3.416-3.1715 7.402-5.5302 11.956-7.0755 4.555-1.6267 9.272-2.44 14.152-2.44 5.368 0 9.76 0.6913 13.176 2.074 3.498 1.3013 6.263 3.2533 8.296 5.8555 2.115 2.603 3.579 5.816 4.392 9.638 0.814 3.742 1.22 8.093 1.22 13.054v20.135c0 5.522-4.477 10-10 10h-6.836zm-4.514-18.545c-1.382 0-3.131 0.082-5.246 0.244-2.033 0.082-4.026 0.407-5.978 0.976-1.87 0.57-3.497 1.424-4.88 2.562-1.301 1.139-1.952 2.725-1.952 4.759 0 2.196 0.936 3.822 2.806 4.879 1.871 1.058 3.823 1.586 5.856 1.586 1.79 0 3.498-0.244 5.124-0.732 1.708-0.488 3.213-1.179 4.514-2.074 1.302-0.894 2.318-2.033 3.05-3.416 0.814-1.382 1.22-3.009 1.22-4.879v-3.905z"/>
<path d="m340.18 76.708c0-5.5223 4.477-9.9995 10-9.9995h7.568v8.0515h0.244c0.569-1.138 1.382-2.2768 2.44-3.4155 1.057-1.1387 2.318-2.1553 3.782-3.05s3.131-1.6267 5.002-2.196c1.87-0.5693 3.904-0.854 6.1-0.854 4.636 0 8.377 0.732 11.224 2.196 2.846 1.3827 5.042 3.3347 6.588 5.8555 1.626 2.522 2.724 5.49 3.294 8.906 0.569 3.416 0.854 7.117 0.854 11.103v22.695c0 5.523-4.477 10.001-10 10.001h-8.3v-29.037c0-1.708-0.082-3.456-0.244-5.246-0.082-1.87-0.448-3.578-1.098-5.123-0.57-1.546-1.505-2.807-2.806-3.783-1.22-0.976-3.01-1.464-5.368-1.464-2.359 0-4.27 0.448-5.734 1.342-1.464 0.814-2.603 1.952-3.416 3.416-0.732 1.383-1.22 2.969-1.464 4.758-0.244 1.79-0.366 3.66-0.366 5.612v19.524c0 5.523-4.477 10.001-10 10.001h-8.3z"/>
<path d="m451.84 71.348c0 5.5225-4.477 9.9995-10 9.9995h-6.104v19.765c0 1.626 0.082 3.131 0.244 4.513 0.163 1.302 0.529 2.44 1.098 3.416 0.57 0.976 1.424 1.749 2.562 2.319 1.22 0.488 2.806 0.731 4.758 0.731 0.976 0 2.237-0.081 3.782-0.244 1.627-0.244 2.847-0.731 3.66-1.463v8.724c0 3.915-2.452 7.557-6.344 7.989-2.196 0.244-4.351 0.366-6.466 0.366-3.09 0-5.937-0.325-8.54-0.976-2.602-0.65-4.88-1.667-6.832-3.049-1.952-1.464-3.497-3.335-4.636-5.613-1.057-2.277-1.586-5.042-1.586-8.295v-28.183h-11.712v-4.64c0-5.5224 4.478-9.9995 10-9.9995h1.712v-7.568c0-5.5229 4.478-10 10-10h8.3v17.568h16.104z"/>
</g>
<g clip-rule="evenodd" fill-rule="evenodd">
<path d="m103.79 140.09-3.0389-83.784-5.5036-22.089 36.735 3.8889v101.35l-22.44 12.951z" fill="#24386c"/>
<path d="m131.98 38.1-22.44 12.96-46.308-10.158-54.203 22.069-9.0306-24.871 32.99-19.05 33-19.05 32.991 19.05z" fill="#7589be"/>
<path d="m0 38.1 22.44 12.96 13.008 38.686 43.921 35.142-13.378 27.512-33-19.051-32.991-19.05v-76.2" fill="#b2bfe8"/>
<path d="m80.868 104.56-14.877 21.932v25.91l21.11-12.18 10.877-16.242" fill="#24386c"/>
<path d="m66 100.59-21.119-36.565 4.5489-12.119 17.293-8.3844 20.378 20.504z" fill="#7589be"/>
<path d="m44.881 64.022 21.11 12.18v24.38l-19.524 0.84001-11.81-15.08 10.224-22.32" fill="#b2bfe8"/>
<path d="m65.991 76.2 21.11-12.179 14.367 23.922-17.386 14.365-18.091-1.7272z" fill="#24386c"/>
<path d="m87.101 140.22 22.44 12.181v-101.34l-21.78-12.57-21.77-12.57-21.78 12.57-21.77 12.57v50.289l21.77 12.57 21.78 12.571 21.11-12.191zm0-51.83-21.11 12.19-21.11-12.19v-24.37l21.11-12.19 21.11 12.19v24.37" fill="#dc244c"/>
</g>
<path d="m66 126.5v-25.914l-21-12.086v25.871z" fill="url(#paint0_linear_425_56)"/>
<defs>
<linearGradient id="paint0_linear_425_56" x1="62.128" x2="41.202" y1="105.54" y2="105.54" gradientUnits="userSpaceOnUse">
<stop stop-color="#FF3364" offset="0"/>
<stop stop-color="#C91540" stop-opacity="0" offset="1"/>
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 5.4 KiB

View File

@ -0,0 +1,121 @@
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { QdrantClient } from '@qdrant/js-client-rest'
import { QdrantVectorStore, QdrantLibArgs } from 'langchain/vectorstores/qdrant'
import { Embeddings } from 'langchain/embeddings/base'
import { Document } from 'langchain/document'
import { getBaseClasses } from '../../../src/utils'
import { flatten } from 'lodash'
/**
 * Flowise node that upserts incoming documents into a Qdrant collection and
 * exposes the result as a vector store or retriever.
 */
class QdrantUpsert_VectorStores implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    outputs: INodeOutputsValue[]

    constructor() {
        this.label = 'Qdrant Upsert Document'
        this.name = 'qdrantUpsert'
        this.type = 'Qdrant'
        this.icon = 'qdrant_logo.svg'
        this.category = 'Vector Stores'
        this.description = 'Upsert documents to Qdrant'
        this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
        this.inputs = [
            {
                label: 'Document',
                name: 'document',
                type: 'Document',
                list: true
            },
            {
                label: 'Embeddings',
                name: 'embeddings',
                type: 'Embeddings'
            },
            {
                label: 'Qdrant Server URL',
                name: 'qdrantServerUrl',
                type: 'string',
                placeholder: 'http://localhost:6333'
            },
            {
                label: 'Qdrant Collection Name',
                name: 'qdrantCollection',
                type: 'string'
            },
            {
                label: 'Qdrant API Key',
                name: 'qdrantApiKey',
                type: 'password',
                optional: true
            },
            {
                label: 'Top K',
                name: 'topK',
                description: 'Number of top results to fetch. Default to 4',
                placeholder: '4',
                type: 'number',
                additionalParams: true,
                optional: true
            }
        ]
        this.outputs = [
            {
                label: 'Qdrant Retriever',
                name: 'retriever',
                baseClasses: this.baseClasses
            },
            {
                label: 'Qdrant Vector Store',
                name: 'vectorStore',
                baseClasses: [this.type, ...getBaseClasses(QdrantVectorStore)]
            }
        ]
    }

    /**
     * Embeds and upserts the input documents into Qdrant.
     *
     * @returns A retriever or vector store depending on the selected output.
     */
    async init(nodeData: INodeData): Promise<any> {
        const qdrantServerUrl = nodeData.inputs?.qdrantServerUrl as string
        const collectionName = nodeData.inputs?.qdrantCollection as string
        const qdrantApiKey = nodeData.inputs?.qdrantApiKey as string
        const docs = nodeData.inputs?.document as Document[]
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const output = nodeData.outputs?.output as string
        const topK = nodeData.inputs?.topK as string
        const k = topK ? parseInt(topK, 10) : 4

        // connect to Qdrant Cloud
        const client = new QdrantClient({
            url: qdrantServerUrl,
            apiKey: qdrantApiKey
        })

        // Documents may arrive as nested lists from upstream nodes; flatten
        // first, then re-wrap each entry as a Document instance.
        const flattenedDocs = docs && docs.length ? flatten(docs) : []
        const finalDocs = flattenedDocs.map((doc) => new Document(doc))

        const dbConfig: QdrantLibArgs = {
            client,
            url: qdrantServerUrl,
            collectionName
        }

        const vectorStore = await QdrantVectorStore.fromDocuments(finalDocs, embeddings, dbConfig)

        if (output === 'retriever') {
            return vectorStore.asRetriever(k)
        }
        if (output === 'vectorStore') {
            ;(vectorStore as any).k = k
            return vectorStore
        }
        return vectorStore
    }
}

module.exports = { nodeClass: QdrantUpsert_VectorStores }

View File

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="451.84" height="152.4" fill="none" version="1.1" viewBox="0 0 451.84 152.4" xmlns="http://www.w3.org/2000/svg">
<g fill="#dc244c">
<path d="m212.69 116c0 5.523-4.477 10.001-10 10.001h-6.836v-7.808h-0.244c-0.732 1.057-1.708 2.155-2.928 3.293-1.139 1.058-2.521 2.034-4.148 2.929-1.545 0.894-3.294 1.626-5.246 2.196-1.871 0.569-3.823 0.853-5.856 0.853-4.392 0-8.377-0.732-11.956-2.196-3.579-1.545-6.669-3.66-9.272-6.344-2.521-2.765-4.473-6.018-5.856-9.759-1.383-3.742-2.074-7.849-2.074-12.322 0-4.148 0.61-8.093 1.83-11.835 1.301-3.822 3.091-7.198 5.368-10.126 2.359-2.9275 5.205-5.2455 8.54-6.9535 3.335-1.7893 7.117-2.684 11.346-2.684 3.823 0 7.361 0.61 10.614 1.83 3.335 1.1387 6.059 3.1313 8.174 5.9785h0.244v-29.284c0-5.5229 4.477-10 10-10h8.3zm-16.836-19.646c0-4.473-1.301-8.092-3.904-10.858-2.521-2.765-6.1-4.148-10.736-4.148s-8.255 1.383-10.858 4.148c-2.521 2.766-3.782 6.385-3.782 10.858 0 4.474 1.261 8.093 3.782 10.858 2.603 2.766 6.222 4.149 10.858 4.149s8.215-1.383 10.736-4.149c2.603-2.765 3.904-6.384 3.904-10.858z"/>
<path d="m224.53 76.708c0-5.5223 4.477-9.9995 10-9.9995h8.3v9.5155h0.244c1.952-3.6595 4.27-6.3842 6.954-8.1735 2.684-1.8707 6.059-2.806 10.126-2.806 1.057 0 2.114 0.0407 3.172 0.122 1.057 0.0813 2.033 0.244 2.928 0.488v16.714c-1.302-0.407-2.603-0.692-3.904-0.855-1.22-0.244-2.522-0.366-3.904-0.366-3.498 0-6.263 0.488-8.296 1.464-2.034 0.976-3.62 2.359-4.758 4.148-1.058 1.708-1.749 3.782-2.074 6.222-0.326 2.441-0.488 5.124-0.488 8.052v14.766c0 5.523-4.477 10.001-10 10.001h-8.3z"/>
<path d="m310.64 118.56h-0.244c-2.033 3.172-4.758 5.449-8.174 6.832-3.334 1.382-6.872 2.073-10.614 2.073-2.765 0-5.449-0.406-8.052-1.219-2.521-0.732-4.758-1.871-6.71-3.416-1.952-1.546-3.497-3.457-4.636-5.735-1.138-2.277-1.708-4.92-1.708-7.929 0-3.416 0.61-6.304 1.83-8.662 1.302-2.359 3.01-4.311 5.124-5.856 2.196-1.546 4.677-2.725 7.442-3.538 2.766-0.895 5.612-1.546 8.54-1.953 3.01-0.406 5.978-0.65 8.906-0.732 3.01-0.081 5.775-0.121 8.296-0.121 0-3.254-1.179-5.816-3.538-7.687-2.277-1.952-5.002-2.928-8.174-2.928-3.009 0-5.774 0.651-8.296 1.952-2.44 1.221-4.636 2.929-6.588 5.124l-9.76-10.004c3.416-3.1715 7.402-5.5302 11.956-7.0755 4.555-1.6267 9.272-2.44 14.152-2.44 5.368 0 9.76 0.6913 13.176 2.074 3.498 1.3013 6.263 3.2533 8.296 5.8555 2.115 2.603 3.579 5.816 4.392 9.638 0.814 3.742 1.22 8.093 1.22 13.054v20.135c0 5.522-4.477 10-10 10h-6.836zm-4.514-18.545c-1.382 0-3.131 0.082-5.246 0.244-2.033 0.082-4.026 0.407-5.978 0.976-1.87 0.57-3.497 1.424-4.88 2.562-1.301 1.139-1.952 2.725-1.952 4.759 0 2.196 0.936 3.822 2.806 4.879 1.871 1.058 3.823 1.586 5.856 1.586 1.79 0 3.498-0.244 5.124-0.732 1.708-0.488 3.213-1.179 4.514-2.074 1.302-0.894 2.318-2.033 3.05-3.416 0.814-1.382 1.22-3.009 1.22-4.879v-3.905z"/>
<path d="m340.18 76.708c0-5.5223 4.477-9.9995 10-9.9995h7.568v8.0515h0.244c0.569-1.138 1.382-2.2768 2.44-3.4155 1.057-1.1387 2.318-2.1553 3.782-3.05s3.131-1.6267 5.002-2.196c1.87-0.5693 3.904-0.854 6.1-0.854 4.636 0 8.377 0.732 11.224 2.196 2.846 1.3827 5.042 3.3347 6.588 5.8555 1.626 2.522 2.724 5.49 3.294 8.906 0.569 3.416 0.854 7.117 0.854 11.103v22.695c0 5.523-4.477 10.001-10 10.001h-8.3v-29.037c0-1.708-0.082-3.456-0.244-5.246-0.082-1.87-0.448-3.578-1.098-5.123-0.57-1.546-1.505-2.807-2.806-3.783-1.22-0.976-3.01-1.464-5.368-1.464-2.359 0-4.27 0.448-5.734 1.342-1.464 0.814-2.603 1.952-3.416 3.416-0.732 1.383-1.22 2.969-1.464 4.758-0.244 1.79-0.366 3.66-0.366 5.612v19.524c0 5.523-4.477 10.001-10 10.001h-8.3z"/>
<path d="m451.84 71.348c0 5.5225-4.477 9.9995-10 9.9995h-6.104v19.765c0 1.626 0.082 3.131 0.244 4.513 0.163 1.302 0.529 2.44 1.098 3.416 0.57 0.976 1.424 1.749 2.562 2.319 1.22 0.488 2.806 0.731 4.758 0.731 0.976 0 2.237-0.081 3.782-0.244 1.627-0.244 2.847-0.731 3.66-1.463v8.724c0 3.915-2.452 7.557-6.344 7.989-2.196 0.244-4.351 0.366-6.466 0.366-3.09 0-5.937-0.325-8.54-0.976-2.602-0.65-4.88-1.667-6.832-3.049-1.952-1.464-3.497-3.335-4.636-5.613-1.057-2.277-1.586-5.042-1.586-8.295v-28.183h-11.712v-4.64c0-5.5224 4.478-9.9995 10-9.9995h1.712v-7.568c0-5.5229 4.478-10 10-10h8.3v17.568h16.104z"/>
</g>
<g clip-rule="evenodd" fill-rule="evenodd">
<path d="m103.79 140.09-3.0389-83.784-5.5036-22.089 36.735 3.8889v101.35l-22.44 12.951z" fill="#24386c"/>
<path d="m131.98 38.1-22.44 12.96-46.308-10.158-54.203 22.069-9.0306-24.871 32.99-19.05 33-19.05 32.991 19.05z" fill="#7589be"/>
<path d="m0 38.1 22.44 12.96 13.008 38.686 43.921 35.142-13.378 27.512-33-19.051-32.991-19.05v-76.2" fill="#b2bfe8"/>
<path d="m80.868 104.56-14.877 21.932v25.91l21.11-12.18 10.877-16.242" fill="#24386c"/>
<path d="m66 100.59-21.119-36.565 4.5489-12.119 17.293-8.3844 20.378 20.504z" fill="#7589be"/>
<path d="m44.881 64.022 21.11 12.18v24.38l-19.524 0.84001-11.81-15.08 10.224-22.32" fill="#b2bfe8"/>
<path d="m65.991 76.2 21.11-12.179 14.367 23.922-17.386 14.365-18.091-1.7272z" fill="#24386c"/>
<path d="m87.101 140.22 22.44 12.181v-101.34l-21.78-12.57-21.77-12.57-21.78 12.57-21.77 12.57v50.289l21.77 12.57 21.78 12.571 21.11-12.191zm0-51.83-21.11 12.19-21.11-12.19v-24.37l21.11-12.19 21.11 12.19v24.37" fill="#dc244c"/>
</g>
<path d="m66 126.5v-25.914l-21-12.086v25.871z" fill="url(#paint0_linear_425_56)"/>
<defs>
<linearGradient id="paint0_linear_425_56" x1="62.128" x2="41.202" y1="105.54" y2="105.54" gradientUnits="userSpaceOnUse">
<stop stop-color="#FF3364" offset="0"/>
<stop stop-color="#C91540" stop-opacity="0" offset="1"/>
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 5.4 KiB

View File

@ -1,6 +1,6 @@
{
"name": "flowise-components",
"version": "1.2.13",
"version": "1.2.15",
"description": "Flowiseai Components",
"main": "dist/src/index",
"types": "dist/src/index.d.ts",
@ -16,10 +16,12 @@
},
"license": "SEE LICENSE IN LICENSE.md",
"dependencies": {
"@aws-sdk/client-dynamodb": "^3.360.0",
"@dqbd/tiktoken": "^1.0.7",
"@getzep/zep-js": "^0.3.1",
"@huggingface/inference": "1",
"@pinecone-database/pinecone": "^0.0.12",
"@qdrant/js-client-rest": "^1.2.2",
"@supabase/supabase-js": "^2.21.0",
"@types/js-yaml": "^4.0.5",
"axios": "^0.27.2",
@ -29,11 +31,11 @@
"d3-dsv": "2",
"dotenv": "^16.0.0",
"express": "^4.17.3",
"faiss-node": "^0.2.1",
"faiss-node": "^0.2.2",
"form-data": "^4.0.0",
"graphql": "^16.6.0",
"html-to-text": "^9.0.5",
"langchain": "^0.0.94",
"langchain": "^0.0.96",
"linkifyjs": "^4.1.1",
"mammoth": "^1.5.1",
"moment": "^2.29.3",
@ -42,7 +44,9 @@
"pdfjs-dist": "^3.7.107",
"playwright": "^1.35.0",
"puppeteer": "^20.7.1",
"redis": "^4.6.7",
"srt-parser-2": "^1.2.3",
"vm2": "^3.9.19",
"weaviate-ts-client": "^1.1.0",
"ws": "^8.9.0"
},

View File

@ -2,7 +2,18 @@
* Types
*/
export type NodeParamsType = 'options' | 'string' | 'number' | 'boolean' | 'password' | 'json' | 'code' | 'date' | 'file' | 'folder'
export type NodeParamsType =
| 'asyncOptions'
| 'options'
| 'string'
| 'number'
| 'boolean'
| 'password'
| 'json'
| 'code'
| 'date'
| 'file'
| 'folder'
export type CommonType = string | number | boolean | undefined | null
@ -16,6 +27,10 @@ export interface ICommonObject {
[key: string]: any | CommonType | ICommonObject | CommonType[] | ICommonObject[]
}
export type IDatabaseEntity = {
[key: string]: any
}
export interface IAttachment {
content: string
contentType: string
@ -50,6 +65,7 @@ export interface INodeParams {
placeholder?: string
fileType?: string
additionalParams?: boolean
loadMethod?: string
}
export interface INodeExecutionData {
@ -74,6 +90,9 @@ export interface INodeProperties {
export interface INode extends INodeProperties {
inputs?: INodeParams[]
output?: INodeOutputsValue[]
loadMethods?: {
[key: string]: (nodeData: INodeData, options?: ICommonObject) => Promise<INodeOptionsValue[]>
}
init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<any>
run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<string | ICommonObject>
}
@ -83,6 +102,7 @@ export interface INodeData extends INodeProperties {
inputs?: ICommonObject
outputs?: ICommonObject
instance?: any
loadMethod?: string // method to load async options
}
export interface IMessage {

View File

@ -18,6 +18,7 @@ export const notEmptyRegex = '(.|\\s)*\\S(.|\\s)*' //return true if string is no
*/
export const getBaseClasses = (targetClass: any) => {
const baseClasses: string[] = []
const skipClassNames = ['BaseLangChain', 'Serializable']
if (targetClass instanceof Function) {
let baseClass = targetClass
@ -26,7 +27,7 @@ export const getBaseClasses = (targetClass: any) => {
const newBaseClass = Object.getPrototypeOf(baseClass)
if (newBaseClass && newBaseClass !== Object && newBaseClass.name) {
baseClass = newBaseClass
baseClasses.push(baseClass.name)
if (!skipClassNames.includes(baseClass.name)) baseClasses.push(baseClass.name)
} else {
break
}
@ -244,43 +245,30 @@ export class CustomChainHandler extends BaseCallbackHandler {
}
}
export const returnJSONStr = (jsonStr: string): string => {
let jsonStrArray = jsonStr.split(':')
let wholeString = ''
for (let i = 0; i < jsonStrArray.length; i++) {
if (jsonStrArray[i].includes(',') && jsonStrArray[i + 1] !== undefined) {
const splitValueAndTitle = jsonStrArray[i].split(',')
const value = splitValueAndTitle[0]
const newTitle = splitValueAndTitle[1]
wholeString += handleEscapeDoubleQuote(value) + ',' + newTitle + ':'
} else {
wholeString += wholeString === '' ? jsonStrArray[i] + ':' : handleEscapeDoubleQuote(jsonStrArray[i])
}
}
return wholeString
}
const handleEscapeDoubleQuote = (value: string): string => {
let newValue = ''
if (value.includes('"')) {
const valueArray = value.split('"')
for (let i = 0; i < valueArray.length; i++) {
if ((i + 1) % 2 !== 0) {
switch (valueArray[i]) {
case '':
newValue += '"'
break
case '}':
newValue += '"}'
break
default:
newValue += '\\"' + valueArray[i] + '\\"'
}
} else {
newValue += valueArray[i]
}
}
}
return newValue === '' ? value : newValue
}
export const availableDependencies = [
'@dqbd/tiktoken',
'@getzep/zep-js',
'@huggingface/inference',
'@pinecone-database/pinecone',
'@supabase/supabase-js',
'axios',
'cheerio',
'chromadb',
'cohere-ai',
'd3-dsv',
'form-data',
'graphql',
'html-to-text',
'langchain',
'linkifyjs',
'mammoth',
'moment',
'node-fetch',
'pdf-parse',
'pdfjs-dist',
'playwright',
'puppeteer',
'srt-parser-2',
'typeorm',
'weaviate-ts-client'
]

View File

@ -1,4 +1,7 @@
PORT=3000
# FLOWISE_USERNAME=user
# FLOWISE_PASSWORD=1234
# DEBUG=true
# DATABASE_PATH=/your_database_path/.flowise
# APIKEY_PATH=/your_api_key_path/.flowise
# EXECUTION_MODE=child or main

View File

@ -1,10 +1,10 @@
<!-- markdownlint-disable MD030 -->
# Flowise - LangchainJS UI
# Flowise - Low-Code LLM apps builder
![Flowise](https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true)
Drag & drop UI to build your customized LLM flow using [LangchainJS](https://github.com/hwchase17/langchainjs)
Drag & drop UI to build your customized LLM flow
## ⚡Quick Start
@ -29,16 +29,34 @@ FLOWISE_USERNAME=user
FLOWISE_PASSWORD=1234
```
## 🔎 Debugging
You can set `DEBUG=true` to the `.env` file. Refer [here](https://docs.flowiseai.com/environment-variables) for full list of env variables
## 📖 Documentation
Coming Soon
## 💻 Cloud Hosted
Coming Soon
[Flowise Docs](https://docs.flowiseai.com/)
## 🌐 Self Host
### [Railway](https://docs.flowiseai.com/deployment/railway)
[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/YK7J0v)
### [Render](https://docs.flowiseai.com/deployment/render)
[![Deploy to Render](https://render.com/images/deploy-to-render-button.svg)](https://docs.flowiseai.com/deployment/render)
### [AWS](https://docs.flowiseai.com/deployment/aws)
### [Azure](https://docs.flowiseai.com/deployment/azure)
### [DigitalOcean](https://docs.flowiseai.com/deployment/digital-ocean)
### [GCP](https://docs.flowiseai.com/deployment/gcp)
## 💻 Cloud Hosted
Coming Soon
## 🙋 Support

View File

@ -3,68 +3,7 @@
"nodes": [
{
"width": 300,
"height": 534,
"id": "promptTemplate_1",
"position": {
"x": 532.2791692529131,
"y": -31.128527027841372
},
"type": "customNode",
"data": {
"id": "promptTemplate_1",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_1-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 4,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_1-input-promptValues-string"
}
],
"inputAnchors": [],
"inputs": {
"template": "Word: {word}\\nAntonym: {antonym}\\n",
"promptValues": ""
},
"outputAnchors": [
{
"id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 532.2791692529131,
"y": -31.128527027841372
},
"dragging": false
},
{
"width": 300,
"height": 956,
"height": 955,
"id": "fewShotPromptTemplate_1",
"position": {
"x": 886.3229032369354,
@ -139,7 +78,7 @@
],
"inputs": {
"examples": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" }\n]",
"examplePrompt": "{{promptTemplate_1.data.instance}}",
"examplePrompt": "{{promptTemplate_0.data.instance}}",
"prefix": "Give the antonym of every input",
"suffix": "Word: {input}\\nAntonym:",
"exampleSeparator": "\\n\\n",
@ -165,7 +104,7 @@
},
{
"width": 300,
"height": 526,
"height": 524,
"id": "openAI_1",
"position": {
"x": 1224.5139327142097,
@ -318,7 +257,7 @@
},
{
"width": 300,
"height": 407,
"height": 405,
"id": "llmChain_1",
"position": {
"x": 1635.363191180743,
@ -375,10 +314,10 @@
"type": "LLMChain | BaseChain | BaseLangChain"
},
{
"id": "llmChain_1-output-outputPrediction-string",
"id": "llmChain_1-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
"type": "string | json"
}
],
"default": "llmChain"
@ -395,20 +334,68 @@
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 475,
"id": "promptTemplate_0",
"position": {
"x": 540.0140796251119,
"y": -33.31673494170347
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_0-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_0-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"template": "Word: {word}\\nAntonym: {antonym}\\n",
"promptValues": ""
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 540.0140796251119,
"y": -33.31673494170347
},
"dragging": false
}
],
"edges": [
{
"source": "promptTemplate_1",
"sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "fewShotPromptTemplate_1",
"targetHandle": "fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-fewShotPromptTemplate_1-fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate",
"data": {
"label": ""
}
},
{
"source": "openAI_1",
"sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain",
@ -430,6 +417,17 @@
"data": {
"label": ""
}
},
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "fewShotPromptTemplate_1",
"targetHandle": "fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-fewShotPromptTemplate_1-fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate",
"data": {
"label": ""
}
}
]
}

View File

@ -1,67 +1,6 @@
{
"description": "Simple LLM Chain using HuggingFace Inference API on falcon-7b-instruct model",
"nodes": [
{
"width": 300,
"height": 532,
"id": "promptTemplate_1",
"position": {
"x": 514.5434056794296,
"y": 507.47798128037107
},
"type": "customNode",
"data": {
"id": "promptTemplate_1",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_1-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 4,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_1-input-promptValues-string"
}
],
"inputAnchors": [],
"inputs": {
"template": "Question: {question}\n\nAnswer: Let's think step by step.",
"promptValues": ""
},
"outputAnchors": [
{
"id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 514.5434056794296,
"y": 507.47798128037107
},
"dragging": false
},
{
"width": 300,
"height": 405,
@ -105,7 +44,7 @@
],
"inputs": {
"model": "{{huggingFaceInference_LLMs_0.data.instance}}",
"prompt": "{{promptTemplate_1.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"chainName": ""
},
"outputAnchors": [
@ -121,10 +60,10 @@
"type": "LLMChain | BaseChain | BaseLangChain"
},
{
"id": "llmChain_1-output-outputPrediction-string",
"id": "llmChain_1-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
"type": "string | json"
}
],
"default": "llmChain"
@ -144,7 +83,7 @@
},
{
"width": 300,
"height": 427,
"height": 429,
"id": "huggingFaceInference_LLMs_0",
"position": {
"x": 503.5630827259226,
@ -245,20 +184,68 @@
"y": 50.79125094823999
},
"dragging": false
},
{
"width": 300,
"height": 475,
"id": "promptTemplate_0",
"position": {
"x": 506.50436294210306,
"y": 504.50766458127396
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_0-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_0-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"template": "Question: {question}\n\nAnswer: Let's think step by step.",
"promptValues": ""
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 506.50436294210306,
"y": 504.50766458127396
},
"dragging": false
}
],
"edges": [
{
"source": "promptTemplate_1",
"sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "huggingFaceInference_LLMs_0",
"sourceHandle": "huggingFaceInference_LLMs_0-output-huggingFaceInference_LLMs-HuggingFaceInference|LLM|BaseLLM|BaseLanguageModel|BaseLangChain",
@ -269,6 +256,17 @@
"data": {
"label": ""
}
},
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
}
]
}

View File

@ -6,8 +6,8 @@
"height": 524,
"id": "chatOpenAI_0",
"position": {
"x": 373.8366297840716,
"y": 448.58765780622326
"x": 648.7470970481406,
"y": 462.3331811694268
},
"type": "customNode",
"data": {
@ -34,33 +34,29 @@
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0314",
"name": "gpt-4-0314"
},
{
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-4-32k",
"name": "gpt-4-32k"
},
{
"label": "gpt-4-32k-0613",
"name": "gpt-4-32k-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
},
{
"label": "gpt-3.5-turbo-16k",
"name": "gpt-3.5-turbo-16k"
},
{
"label": "gpt-3.5-turbo-16k-0613",
"name": "gpt-3.5-turbo-16k-0613"
}
],
"default": "gpt-3.5-turbo",
@ -148,64 +144,8 @@
},
"selected": false,
"positionAbsolute": {
"x": 373.8366297840716,
"y": 448.58765780622326
},
"dragging": false
},
{
"width": 300,
"height": 280,
"id": "openAIFunctionAgent_0",
"position": {
"x": 1084.5405852317417,
"y": 384.4653768834282
},
"type": "customNode",
"data": {
"id": "openAIFunctionAgent_0",
"label": "OpenAI Function Agent",
"name": "openAIFunctionAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain", "Serializable"],
"category": "Agents",
"description": "An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call",
"inputParams": [],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "openAIFunctionAgent_0-input-tools-Tool"
},
{
"label": "OpenAI Chat Model",
"name": "model",
"description": "Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target=\"_blank\" href=\"https://platform.openai.com/docs/guides/gpt/function-calling\">docs</a> for more info",
"type": "BaseChatModel",
"id": "openAIFunctionAgent_0-input-model-BaseChatModel"
}
],
"inputs": {
"tools": ["{{calculator_0.data.instance}}", "{{serper_0.data.instance}}"],
"model": "{{chatOpenAI_0.data.instance}}"
},
"outputAnchors": [
{
"id": "openAIFunctionAgent_0-output-openAIFunctionAgent-AgentExecutor|BaseChain|BaseLangChain|Serializable",
"name": "openAIFunctionAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | BaseLangChain | Serializable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1084.5405852317417,
"y": 384.4653768834282
"x": 648.7470970481406,
"y": 462.3331811694268
},
"dragging": false
},
@ -214,8 +154,8 @@
"height": 278,
"id": "serper_0",
"position": {
"x": 691.7580226065319,
"y": 34.00444633899792
"x": 486.27248799490576,
"y": 4.465900738576664
},
"type": "customNode",
"data": {
@ -249,8 +189,8 @@
},
"selected": false,
"positionAbsolute": {
"x": 691.7580226065319,
"y": 34.00444633899792
"x": 486.27248799490576,
"y": 4.465900738576664
},
"dragging": false
},
@ -259,8 +199,8 @@
"height": 143,
"id": "calculator_0",
"position": {
"x": 341.63347110886497,
"y": 261.6753474034481
"x": 286.4092336819905,
"y": 304.05673891709597
},
"type": "customNode",
"data": {
@ -287,20 +227,198 @@
},
"selected": false,
"positionAbsolute": {
"x": 341.63347110886497,
"y": 261.6753474034481
"x": 286.4092336819905,
"y": 304.05673891709597
},
"dragging": false
},
{
"width": 300,
"height": 383,
"id": "openAIFunctionAgent_0",
"position": {
"x": 1341.2259105169032,
"y": 318.35651549722945
},
"type": "customNode",
"data": {
"id": "openAIFunctionAgent_0",
"label": "OpenAI Function Agent",
"name": "openAIFunctionAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
"description": "An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call",
"inputParams": [
{
"label": "System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"optional": true,
"additionalParams": true,
"id": "openAIFunctionAgent_0-input-systemMessage-string"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "openAIFunctionAgent_0-input-tools-Tool"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "openAIFunctionAgent_0-input-memory-BaseChatMemory"
},
{
"label": "OpenAI Chat Model",
"name": "model",
"description": "Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target=\"_blank\" href=\"https://platform.openai.com/docs/guides/gpt/function-calling\">docs</a> for more info",
"type": "BaseChatModel",
"id": "openAIFunctionAgent_0-input-model-BaseChatModel"
}
],
"inputs": {
"tools": ["{{serper_0.data.instance}}", "{{calculator_0.data.instance}}", "{{customTool_0.data.instance}}"],
"memory": "{{bufferMemory_0.data.instance}}",
"model": "{{chatOpenAI_0.data.instance}}",
"systemMessage": ""
},
"outputAnchors": [
{
"id": "openAIFunctionAgent_0-output-openAIFunctionAgent-AgentExecutor|BaseChain",
"name": "openAIFunctionAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1341.2259105169032,
"y": 318.35651549722945
},
"dragging": false
},
{
"width": 300,
"height": 376,
"id": "bufferMemory_0",
"position": {
"x": 285.7750469157585,
"y": 465.1140427303788
},
"type": "customNode",
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"inputParams": [
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"id": "bufferMemory_0-input-memoryKey-string"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input",
"id": "bufferMemory_0-input-inputKey-string"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
},
"outputAnchors": [
{
"id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"name": "bufferMemory",
"label": "BufferMemory",
"type": "BufferMemory | BaseChatMemory | BaseMemory"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 285.7750469157585,
"y": 465.1140427303788
},
"dragging": false
},
{
"width": 300,
"height": 277,
"id": "customTool_0",
"position": {
"x": 883.9529939431576,
"y": -32.32503903826486
},
"type": "customNode",
"data": {
"id": "customTool_0",
"label": "Custom Tool",
"name": "customTool",
"type": "CustomTool",
"baseClasses": ["CustomTool", "Tool", "StructuredTool"],
"category": "Tools",
"description": "Use custom tool you've created in Flowise within chatflow",
"inputParams": [
{
"label": "Select Tool",
"name": "selectedTool",
"type": "asyncOptions",
"loadMethod": "listTools",
"id": "customTool_0-input-selectedTool-asyncOptions"
}
],
"inputAnchors": [],
"inputs": {
"selectedTool": ""
},
"outputAnchors": [
{
"id": "customTool_0-output-customTool-CustomTool|Tool|StructuredTool",
"name": "customTool",
"label": "CustomTool",
"type": "CustomTool | Tool | StructuredTool"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 883.9529939431576,
"y": -32.32503903826486
},
"dragging": false
}
],
"edges": [
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain|Serializable",
"source": "serper_0",
"sourceHandle": "serper_0-output-serper-Serper|Tool|StructuredTool|BaseLangChain|Serializable",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel",
"targetHandle": "openAIFunctionAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain|Serializable-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel",
"id": "serper_0-serper_0-output-serper-Serper|Tool|StructuredTool|BaseLangChain|Serializable-openAIFunctionAgent_0-openAIFunctionAgent_0-input-tools-Tool",
"data": {
"label": ""
}
@ -317,12 +435,34 @@
}
},
{
"source": "serper_0",
"sourceHandle": "serper_0-output-serper-Serper|Tool|StructuredTool|BaseLangChain|Serializable",
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain|Serializable",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain|Serializable-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-memory-BaseChatMemory",
"type": "buttonedge",
"id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-openAIFunctionAgent_0-openAIFunctionAgent_0-input-memory-BaseChatMemory",
"data": {
"label": ""
}
},
{
"source": "customTool_0",
"sourceHandle": "customTool_0-output-customTool-CustomTool|Tool|StructuredTool",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "serper_0-serper_0-output-serper-Serper|Tool|StructuredTool|BaseLangChain|Serializable-openAIFunctionAgent_0-openAIFunctionAgent_0-input-tools-Tool",
"id": "customTool_0-customTool_0-output-customTool-CustomTool|Tool|StructuredTool-openAIFunctionAgent_0-openAIFunctionAgent_0-input-tools-Tool",
"data": {
"label": ""
}

View File

@ -3,7 +3,7 @@
"nodes": [
{
"width": 300,
"height": 526,
"height": 524,
"id": "openAI_2",
"position": {
"x": 793.6674026500068,
@ -156,213 +156,11 @@
},
{
"width": 300,
"height": 534,
"id": "promptTemplate_2",
"position": {
"x": 796.3399644963663,
"y": 512.349657546027
},
"type": "customNode",
"data": {
"id": "promptTemplate_2",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_2-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 4,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_2-input-promptValues-string"
}
],
"inputAnchors": [],
"inputs": {
"template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:",
"promptValues": "{\n \"objective\": \"{{question}}\"\n}"
},
"outputAnchors": [
{
"id": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 796.3399644963663,
"y": 512.349657546027
},
"dragging": false
},
{
"width": 300,
"height": 407,
"id": "llmChain_2",
"position": {
"x": 1225.2861408370582,
"y": 485.62403908243243
},
"type": "customNode",
"data": {
"id": "llmChain_2",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Name Your Chain",
"optional": true,
"id": "llmChain_2-input-chainName-string"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_2-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_2-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{openAI_2.data.instance}}",
"prompt": "{{promptTemplate_2.data.instance}}",
"chainName": "First Chain"
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "llmChain_2-output-llmChain-LLMChain|BaseChain|BaseLangChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain | BaseLangChain"
},
{
"id": "llmChain_2-output-outputPrediction-string",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "outputPrediction"
},
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 1225.2861408370582,
"y": 485.62403908243243
}
},
{
"width": 300,
"height": 534,
"id": "promptTemplate_3",
"position": {
"x": 1589.206555911206,
"y": 460.23470154201766
},
"type": "customNode",
"data": {
"id": "promptTemplate_3",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_3-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 4,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_3-input-promptValues-string"
}
],
"inputAnchors": [],
"inputs": {
"template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array.",
"promptValues": "{\n \"objective\": \"{{question}}\",\n \"result\": \"\"\n}"
},
"outputAnchors": [
{
"id": "promptTemplate_3-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1589.206555911206,
"y": 460.23470154201766
},
"dragging": false
},
{
"width": 300,
"height": 526,
"height": 524,
"id": "openAI_3",
"position": {
"x": 1225.2861408370586,
"y": -62.7856517905272
"x": 1216.061423775753,
"y": -20.35195330852082
},
"type": "customNode",
"data": {
@ -503,27 +301,145 @@
"selected": false
},
"positionAbsolute": {
"x": 1225.2861408370586,
"y": -62.7856517905272
"x": 1216.061423775753,
"y": -20.35195330852082
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 407,
"id": "llmChain_3",
"height": 475,
"id": "promptTemplate_0",
"position": {
"x": 1972.2671768945252,
"y": 142.73435419451476
"x": 792.9464838535649,
"y": 527.1718536712464
},
"type": "customNode",
"data": {
"id": "llmChain_3",
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_0-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_0-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:",
"promptValues": "{\"objective\":\"{{question}}\"}"
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 792.9464838535649,
"y": 527.1718536712464
},
"dragging": false
},
{
"width": 300,
"height": 475,
"id": "promptTemplate_1",
"position": {
"x": 1577.7482561604884,
"y": 516.186942924815
},
"type": "customNode",
"data": {
"id": "promptTemplate_1",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_1-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_1-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array.",
"promptValues": "{\"objective\":\"{{question}}\",\"result\":\"{{llmChain_0.data.instance}}\"}"
},
"outputAnchors": [
{
"id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"positionAbsolute": {
"x": 1577.7482561604884,
"y": 516.186942924815
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 405,
"id": "llmChain_0",
"position": {
"x": 1221.1346231272787,
"y": 538.9546839784628
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"],
"baseClasses": ["LLMChain", "BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
@ -533,7 +449,7 @@
"type": "string",
"placeholder": "Name Your Chain",
"optional": true,
"id": "llmChain_3-input-chainName-string"
"id": "llmChain_0-input-chainName-string"
}
],
"inputAnchors": [
@ -541,18 +457,98 @@
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_3-input-model-BaseLanguageModel"
"id": "llmChain_0-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_3-input-prompt-BasePromptTemplate"
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{openAI_2.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"chainName": "FirstChain"
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain"
},
{
"id": "llmChain_0-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string | json"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "outputPrediction"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1221.1346231272787,
"y": 538.9546839784628
},
"dragging": false
},
{
"width": 300,
"height": 405,
"id": "llmChain_1",
"position": {
"x": 1971.8054567964418,
"y": 207.624530381245
},
"type": "customNode",
"data": {
"id": "llmChain_1",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Name Your Chain",
"optional": true,
"id": "llmChain_1-input-chainName-string"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_1-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_1-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{openAI_3.data.instance}}",
"prompt": "{{promptTemplate_3.data.instance}}",
"prompt": "{{promptTemplate_1.data.instance}}",
"chainName": "LastChain"
},
"outputAnchors": [
@ -562,16 +558,16 @@
"type": "options",
"options": [
{
"id": "llmChain_3-output-llmChain-LLMChain|BaseChain|BaseLangChain",
"id": "llmChain_1-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain | BaseLangChain"
"type": "LLMChain | BaseChain"
},
{
"id": "llmChain_3-output-outputPrediction-string",
"id": "llmChain_1-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
"type": "string | json"
}
],
"default": "llmChain"
@ -583,43 +579,43 @@
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 1972.2671768945252,
"y": 142.73435419451476
}
"x": 1971.8054567964418,
"y": 207.624530381245
},
"dragging": false
}
],
"edges": [
{
"source": "llmChain_2",
"sourceHandle": "llmChain_2-output-outputPrediction-string",
"target": "promptTemplate_3",
"targetHandle": "promptTemplate_3-input-promptValues-string",
"type": "buttonedge",
"id": "llmChain_2-llmChain_2-output-outputPrediction-string-promptTemplate_3-promptTemplate_3-input-promptValues-string",
"data": {
"label": ""
}
},
{
"source": "openAI_2",
"sourceHandle": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain",
"target": "llmChain_2",
"targetHandle": "llmChain_2-input-model-BaseLanguageModel",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_2-openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-llmChain_2-llmChain_2-input-model-BaseLanguageModel",
"id": "openAI_2-openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-llmChain_0-llmChain_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "promptTemplate_2",
"sourceHandle": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_2",
"targetHandle": "llmChain_2-input-prompt-BasePromptTemplate",
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_2-promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "llmChain_0",
"sourceHandle": "llmChain_0-output-outputPrediction-string|json",
"target": "promptTemplate_1",
"targetHandle": "promptTemplate_1-input-promptValues-json",
"type": "buttonedge",
"id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-promptTemplate_1-promptTemplate_1-input-promptValues-json",
"data": {
"label": ""
}
@ -627,21 +623,21 @@
{
"source": "openAI_3",
"sourceHandle": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain",
"target": "llmChain_3",
"targetHandle": "llmChain_3-input-model-BaseLanguageModel",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_3-openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-llmChain_3-llmChain_3-input-model-BaseLanguageModel",
"id": "openAI_3-openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-llmChain_1-llmChain_1-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "promptTemplate_3",
"sourceHandle": "promptTemplate_3-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_3",
"targetHandle": "llmChain_3-input-prompt-BasePromptTemplate",
"source": "promptTemplate_1",
"sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_3-promptTemplate_3-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_3-llmChain_3-input-prompt-BasePromptTemplate",
"id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}

View File

@ -3,7 +3,7 @@
"nodes": [
{
"width": 300,
"height": 526,
"height": 524,
"id": "openAI_1",
"position": {
"x": 510.75932526856377,
@ -156,68 +156,7 @@
},
{
"width": 300,
"height": 534,
"id": "promptTemplate_1",
"position": {
"x": 514.5434056794296,
"y": 507.47798128037107
},
"type": "customNode",
"data": {
"id": "promptTemplate_1",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_1-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 4,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_1-input-promptValues-string"
}
],
"inputAnchors": [],
"inputs": {
"template": "",
"promptValues": ""
},
"outputAnchors": [
{
"id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 514.5434056794296,
"y": 507.47798128037107
},
"dragging": false
},
{
"width": 300,
"height": 407,
"height": 405,
"id": "llmChain_1",
"position": {
"x": 970.9254258940236,
@ -258,7 +197,7 @@
],
"inputs": {
"model": "{{openAI_1.data.instance}}",
"prompt": "{{promptTemplate_1.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"chainName": ""
},
"outputAnchors": [
@ -274,10 +213,10 @@
"type": "LLMChain | BaseChain | BaseLangChain"
},
{
"id": "llmChain_1-output-outputPrediction-string",
"id": "llmChain_1-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
"type": "string | json"
}
],
"default": "llmChain"
@ -294,6 +233,65 @@
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 475,
"id": "promptTemplate_0",
"position": {
"x": 517.7412884791509,
"y": 506.7411400888471
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_0-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_0-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"template": "What is a good name for a company that makes {product}?",
"promptValues": ""
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 517.7412884791509,
"y": 506.7411400888471
},
"dragging": false
}
],
"edges": [
@ -309,12 +307,12 @@
}
},
{
"source": "promptTemplate_1",
"sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}

View File

@ -3,77 +3,7 @@
"nodes": [
{
"width": 300,
"height": 711,
"id": "chatPromptTemplate_1",
"position": {
"x": 441.8516979620723,
"y": 636.1108860994266
},
"type": "customNode",
"data": {
"id": "chatPromptTemplate_1",
"label": "Chat Prompt Template",
"name": "chatPromptTemplate",
"type": "ChatPromptTemplate",
"baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a chat prompt",
"inputParams": [
{
"label": "System Message",
"name": "systemMessagePrompt",
"type": "string",
"rows": 4,
"placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.",
"id": "chatPromptTemplate_1-input-systemMessagePrompt-string"
},
{
"label": "Human Message",
"name": "humanMessagePrompt",
"type": "string",
"rows": 4,
"placeholder": "{text}",
"id": "chatPromptTemplate_1-input-humanMessagePrompt-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 4,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "chatPromptTemplate_1-input-promptValues-string"
}
],
"inputAnchors": [],
"inputs": {
"systemMessagePrompt": "You are a helpful assistant that translates {input_language} to {output_language}.",
"humanMessagePrompt": "{input}",
"promptValues": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}"
},
"outputAnchors": [
{
"id": "chatPromptTemplate_1-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate",
"name": "chatPromptTemplate",
"label": "ChatPromptTemplate",
"type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 441.8516979620723,
"y": 636.1108860994266
},
"dragging": false
},
{
"width": 300,
"height": 526,
"height": 524,
"id": "chatOpenAI_1",
"position": {
"x": 439.5219561593599,
@ -224,7 +154,7 @@
},
{
"width": 300,
"height": 407,
"height": 405,
"id": "llmChain_1",
"position": {
"x": 865.7775572410412,
@ -265,7 +195,7 @@
],
"inputs": {
"model": "{{chatOpenAI_1.data.instance}}",
"prompt": "{{chatPromptTemplate_1.data.instance}}",
"prompt": "{{chatPromptTemplate_0.data.instance}}",
"chainName": "Language Translation"
},
"outputAnchors": [
@ -281,10 +211,10 @@
"type": "LLMChain | BaseChain | BaseLangChain"
},
{
"id": "llmChain_1-output-outputPrediction-string",
"id": "llmChain_1-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
"type": "string | json"
}
],
"default": "llmChain"
@ -301,6 +231,74 @@
"y": 543.9211372857111
},
"dragging": false
},
{
"width": 300,
"height": 652,
"id": "chatPromptTemplate_0",
"position": {
"x": 437.51367850489396,
"y": 649.7619214034173
},
"type": "customNode",
"data": {
"id": "chatPromptTemplate_0",
"label": "Chat Prompt Template",
"name": "chatPromptTemplate",
"type": "ChatPromptTemplate",
"baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a chat prompt",
"inputParams": [
{
"label": "System Message",
"name": "systemMessagePrompt",
"type": "string",
"rows": 4,
"placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.",
"id": "chatPromptTemplate_0-input-systemMessagePrompt-string"
},
{
"label": "Human Message",
"name": "humanMessagePrompt",
"type": "string",
"rows": 4,
"placeholder": "{text}",
"id": "chatPromptTemplate_0-input-humanMessagePrompt-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "chatPromptTemplate_0-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"systemMessagePrompt": "You are a helpful assistant that translates {input_language} to {output_language}.",
"humanMessagePrompt": "{text}",
"promptValues": "{\"input_language\":\"English\",\"output_language\":\"French\",\"text\":\"{{question}}\"}"
},
"outputAnchors": [
{
"id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate",
"name": "chatPromptTemplate",
"label": "ChatPromptTemplate",
"type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 437.51367850489396,
"y": 649.7619214034173
},
"dragging": false
}
],
"edges": [
@ -316,12 +314,12 @@
}
},
{
"source": "chatPromptTemplate_1",
"sourceHandle": "chatPromptTemplate_1-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate",
"source": "chatPromptTemplate_0",
"sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "chatPromptTemplate_1-chatPromptTemplate_1-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}

View File

@ -0,0 +1,8 @@
{
"name": "add_contact_hubspot",
"description": "Add new contact to Hubspot",
"color": "linear-gradient(rgb(85,198,123), rgb(0,230,99))",
"iconSrc": "https://cdn.worldvectorlogo.com/logos/hubspot-1.svg",
"schema": "[{\"id\":1,\"property\":\"email\",\"description\":\"email address of contact\",\"type\":\"string\",\"required\":true},{\"id\":2,\"property\":\"firstname\",\"description\":\"first name of contact\",\"type\":\"string\",\"required\":false},{\"id\":3,\"property\":\"lastname\",\"description\":\"last name of contact\",\"type\":\"string\",\"required\":false}]",
"func": "const fetch = require('node-fetch');\nconst url = 'https://api.hubapi.com/crm/v3/objects/contacts'\nconst token = 'YOUR-TOKEN';\n\nconst body = {\n\t\"properties\": {\n\t \"email\": $email\n\t}\n};\n\nif ($firstname) body.properties.firstname = $firstname;\nif ($lastname) body.properties.lastname = $lastname;\n\nconst options = {\n\tmethod: 'POST',\n\theaders: {\n\t 'Authorization': `Bearer ${token}`,\n\t\t'Content-Type': 'application/json'\n\t},\n\tbody: JSON.stringify(body)\n};\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst text = await response.text();\n\treturn text;\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}"
}

View File

@ -0,0 +1,8 @@
{
"name": "add_airtable",
"description": "Add column1, column2 to Airtable",
"color": "linear-gradient(rgb(125,71,222), rgb(128,102,23))",
"iconSrc": "https://raw.githubusercontent.com/gilbarbara/logos/main/logos/airtable.svg",
"schema": "[{\"id\":0,\"property\":\"column1\",\"description\":\"this is column1\",\"type\":\"string\",\"required\":true},{\"id\":1,\"property\":\"column2\",\"description\":\"this is column2\",\"type\":\"string\",\"required\":true}]",
"func": "const fetch = require('node-fetch');\nconst baseId = 'YOUR-BASE-ID';\nconst tableId = 'YOUR-TABLE-ID';\nconst token = 'YOUR-TOKEN';\n\nconst body = {\n\t\"records\": [\n\t\t{\n\t\t\t\"fields\": {\n\t\t\t\t\"column1\": $column1,\n\t\t\t\t\"column2\": $column2,\n\t\t\t}\n\t\t}\n\t]\n};\n\nconst options = {\n\tmethod: 'POST',\n\theaders: {\n\t\t'Authorization': `Bearer ${token}`,\n\t\t'Content-Type': 'application/json'\n\t},\n\tbody: JSON.stringify(body)\n};\n\nconst url = `https://api.airtable.com/v0/${baseId}/${tableId}`\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst text = await response.text();\n\treturn text;\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}"
}

View File

@ -0,0 +1,8 @@
{
"name": "get_stock_movers",
"description": "Get the stocks that has biggest price/volume moves, e.g. actives, gainers, losers, etc.",
"iconSrc": "https://rapidapi.com/cdn/images?url=https://rapidapi-prod-apis.s3.amazonaws.com/9c/e743343bdd41edad39a3fdffd5b974/016c33699f51603ae6fe4420c439124b.png",
"color": "linear-gradient(rgb(191,202,167), rgb(143,202,246))",
"schema": "[]",
"func": "const fetch = require('node-fetch');\nconst url = 'https://morning-star.p.rapidapi.com/market/v2/get-movers';\nconst options = {\n\tmethod: 'GET',\n\theaders: {\n\t\t'X-RapidAPI-Key': 'YOUR-API-KEY',\n\t\t'X-RapidAPI-Host': 'morning-star.p.rapidapi.com'\n\t}\n};\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst result = await response.text();\n\tconsole.log(result);\n\treturn result;\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}"
}

View File

@ -0,0 +1,8 @@
{
"name": "send_message_to_discord_channel",
"description": "Send message to Discord channel",
"color": "linear-gradient(rgb(155,190,84), rgb(176,69,245))",
"iconSrc": "https://raw.githubusercontent.com/gilbarbara/logos/main/logos/discord-icon.svg",
"schema": "[{\"id\":1,\"property\":\"content\",\"description\":\"message to send\",\"type\":\"string\",\"required\":true}]",
"func": "const fetch = require('node-fetch');\nconst webhookUrl = 'YOUR-WEBHOOK-URL'\n\nconst body = {\n\t\"content\": $content\n};\n\nconst options = {\n\tmethod: 'POST',\n\theaders: {\n\t\t'Content-Type': 'application/json'\n\t},\n\tbody: JSON.stringify(body)\n};\n\nconst url = `${webhookUrl}?wait=true`\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst text = await response.text();\n\treturn text;\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}"
}

View File

@ -0,0 +1,8 @@
{
"name": "send_message_to_slack_channel",
"description": "Send message to Slack channel",
"color": "linear-gradient(rgb(155,190,84), rgb(176,69,245))",
"iconSrc": "https://raw.githubusercontent.com/gilbarbara/logos/main/logos/slack-icon.svg",
"schema": "[{\"id\":1,\"property\":\"text\",\"description\":\"message to send\",\"type\":\"string\",\"required\":true}]",
"func": "const fetch = require('node-fetch');\nconst webhookUrl = 'YOUR-WEBHOOK-URL'\n\nconst body = {\n\t\"text\": $text\n};\n\nconst options = {\n\tmethod: 'POST',\n\theaders: {\n\t\t'Content-Type': 'application/json'\n\t},\n\tbody: JSON.stringify(body)\n};\n\nconst url = `${webhookUrl}`\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst text = await response.text();\n\treturn text;\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}"
}

View File

@ -0,0 +1,8 @@
{
"name": "send_message_to_teams_channel",
"description": "Send message to Teams channel",
"color": "linear-gradient(rgb(155,190,84), rgb(176,69,245))",
"iconSrc": "https://raw.githubusercontent.com/gilbarbara/logos/main/logos/microsoft-teams.svg",
"schema": "[{\"id\":1,\"property\":\"content\",\"description\":\"message to send\",\"type\":\"string\",\"required\":true}]",
"func": "const fetch = require('node-fetch');\nconst webhookUrl = 'YOUR-WEBHOOK-URL'\n\nconst body = {\n\t\"content\": $content\n};\n\nconst options = {\n\tmethod: 'POST',\n\theaders: {\n\t\t'Content-Type': 'application/json'\n\t},\n\tbody: JSON.stringify(body)\n};\n\nconst url = `${webhookUrl}?wait=true`\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst text = await response.text();\n\treturn text;\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}"
}

View File

@ -0,0 +1,8 @@
{
"name": "sendgrid_email",
"description": "Send email using SendGrid",
"color": "linear-gradient(rgb(230,108,70), rgb(222,4,98))",
"iconSrc": "https://raw.githubusercontent.com/gilbarbara/logos/main/logos/sendgrid-icon.svg",
"schema": "[{\"id\":0,\"property\":\"fromEmail\",\"description\":\"Email address used to send the message\",\"type\":\"string\",\"required\":true},{\"id\":1,\"property\":\"toEmail \",\"description\":\"The intended recipient's email address\",\"type\":\"string\",\"required\":true},{\"id\":2,\"property\":\"subject\",\"description\":\"The subject of email\",\"type\":\"string\",\"required\":true},{\"id\":3,\"property\":\"content\",\"description\":\"Content of email\",\"type\":\"string\",\"required\":true}]",
"func": "const fetch = require('node-fetch');\nconst url = 'https://api.sendgrid.com/v3/mail/send';\nconst api_key = 'YOUR-API-KEY';\n\nconst body = {\n \"personalizations\": [\n {\n \"to\": [{ \"email\": $toEmail }]\n }\n ],\n\t\"from\": {\n\t \"email\": $fromEmail\n\t},\n\t\"subject\": $subject,\n\t\"content\": [\n\t {\n\t \"type\": 'text/plain',\n\t \"value\": $content\n\t }\n\t]\n};\n\nconst options = {\n\tmethod: 'POST',\n\theaders: {\n\t 'Authorization': `Bearer ${api_key}`,\n\t\t'Content-Type': 'application/json'\n\t},\n\tbody: JSON.stringify(body)\n};\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst text = await response.text();\n\treturn text;\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}"
}

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.2.12",
"version": "1.2.14",
"description": "Flowiseai Server",
"main": "dist/index",
"types": "dist/index.d.ts",

View File

@ -1,5 +1,10 @@
import path from 'path'
import { IChildProcessMessage, IReactFlowNode, IReactFlowObject, IRunChatflowMessageValue, INodeData } from './Interface'
import { buildLangchain, constructGraphs, getEndingNode, getStartingNodes, resolveVariables } from './utils'
import { buildLangchain, constructGraphs, getEndingNode, getStartingNodes, getUserHome, resolveVariables } from './utils'
import { DataSource } from 'typeorm'
import { ChatFlow } from './entity/ChatFlow'
import { ChatMessage } from './entity/ChatMessage'
import { Tool } from './entity/Tool'
export class ChildProcess {
/**
@ -22,6 +27,8 @@ export class ChildProcess {
await sendToParentProcess('start', '_')
const childAppDataSource = await initDB()
// Create a Queue and add our initial node in it
const { endingNodeData, chatflow, chatId, incomingInput, componentNodes } = messageValue
@ -84,6 +91,7 @@ export class ChildProcess {
componentNodes,
incomingInput.question,
chatId,
childAppDataSource,
incomingInput?.overrideConfig
)
@ -115,6 +123,22 @@ export class ChildProcess {
}
}
/**
 * Initialize a dedicated SQLite DataSource inside the child process.
 * The child process cannot share the parent's connection, so it builds
 * its own with the same entity set.
 * @returns {Promise<DataSource>} the initialized (connected) DataSource
 */
async function initDB() {
    // DATABASE_PATH overrides the default ~/.flowise storage location
    const homePath = process.env.DATABASE_PATH ?? path.join(getUserHome(), '.flowise')
    const childAppDataSource = new DataSource({
        type: 'sqlite',
        database: path.resolve(homePath, 'database.sqlite'),
        synchronize: true,
        entities: [ChatFlow, ChatMessage, Tool],
        migrations: []
    })
    return await childAppDataSource.initialize()
}
/**
* Send data back to parent process
* @param {string} key Key of message

View File

@ -3,18 +3,19 @@ import path from 'path'
import { DataSource } from 'typeorm'
import { ChatFlow } from './entity/ChatFlow'
import { ChatMessage } from './entity/ChatMessage'
import { Tool } from './entity/Tool'
import { getUserHome } from './utils'
let appDataSource: DataSource
export const init = async (): Promise<void> => {
const homePath = path.join(getUserHome(), '.flowise')
const homePath = process.env.DATABASE_PATH ?? path.join(getUserHome(), '.flowise')
appDataSource = new DataSource({
type: 'sqlite',
database: path.resolve(homePath, 'database.sqlite'),
synchronize: true,
entities: [ChatFlow, ChatMessage],
entities: [ChatFlow, ChatMessage, Tool],
migrations: []
})
}

View File

@ -9,10 +9,12 @@ export interface IChatFlow {
id: string
name: string
flowData: string
apikeyid: string
deployed: boolean
updatedDate: Date
createdDate: Date
deployed?: boolean
isPublic?: boolean
apikeyid?: string
chatbotConfig?: string
}
export interface IChatMessage {
@ -21,7 +23,19 @@ export interface IChatMessage {
content: string
chatflowid: string
createdDate: Date
sourceDocuments: string
sourceDocuments?: string
}
export interface ITool {
id: string
name: string
description: string
color: string
iconSrc?: string
schema?: string
func?: string
updatedDate: Date
createdDate: Date
}
export interface IComponentNodes {

View File

@ -18,6 +18,9 @@ export default class Start extends Command {
FLOWISE_USERNAME: Flags.string(),
FLOWISE_PASSWORD: Flags.string(),
PORT: Flags.string(),
DEBUG: Flags.string(),
DATABASE_PATH: Flags.string(),
APIKEY_PATH: Flags.string(),
EXECUTION_MODE: Flags.string()
}
@ -53,7 +56,10 @@ export default class Start extends Command {
if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME
if (flags.FLOWISE_PASSWORD) process.env.FLOWISE_PASSWORD = flags.FLOWISE_PASSWORD
if (flags.PORT) process.env.PORT = flags.PORT
if (flags.DATABASE_PATH) process.env.DATABASE_PATH = flags.DATABASE_PATH
if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH
if (flags.EXECUTION_MODE) process.env.EXECUTION_MODE = flags.EXECUTION_MODE
if (flags.DEBUG) process.env.DEBUG = flags.DEBUG
await (async () => {
try {

View File

@ -14,10 +14,16 @@ export class ChatFlow implements IChatFlow {
flowData: string
@Column({ nullable: true })
apikeyid: string
deployed?: boolean
@Column()
deployed: boolean
@Column({ nullable: true })
isPublic?: boolean
@Column({ nullable: true })
apikeyid?: string
@Column({ nullable: true })
chatbotConfig?: string
@CreateDateColumn()
createdDate: Date

View File

@ -18,7 +18,7 @@ export class ChatMessage implements IChatMessage {
content: string
@Column({ nullable: true })
sourceDocuments: string
sourceDocuments?: string
@CreateDateColumn()
createdDate: Date

View File

@ -0,0 +1,33 @@
/* eslint-disable */
import { Entity, Column, CreateDateColumn, UpdateDateColumn, PrimaryGeneratedColumn } from 'typeorm'
import { ITool } from '../Interface'
/**
 * TypeORM entity for a user-defined custom tool.
 * Mirrors the ITool interface; registered in the DataSource entity list.
 */
@Entity()
export class Tool implements ITool {
    @PrimaryGeneratedColumn('uuid')
    id: string

    @Column()
    name: string

    @Column()
    description: string

    // Display color used by the UI (e.g. a CSS gradient string)
    @Column()
    color: string

    // Optional icon URL shown for the tool
    @Column({ nullable: true })
    iconSrc?: string

    // JSON-stringified input schema describing the tool's parameters
    @Column({ nullable: true })
    schema?: string

    // JavaScript function body executed when the tool is invoked
    @Column({ nullable: true })
    func?: string

    @CreateDateColumn()
    createdDate: Date

    @UpdateDateColumn()
    updatedDate: Date
}

View File

@ -35,7 +35,9 @@ import {
isSameOverrideConfig,
replaceAllAPIKeys,
isFlowValidForStream,
isVectorStoreFaiss
isVectorStoreFaiss,
databaseEntities,
getApiKey
} from './utils'
import { cloneDeep } from 'lodash'
import { getDataSource } from './DataSource'
@ -43,8 +45,9 @@ import { NodesPool } from './NodesPool'
import { ChatFlow } from './entity/ChatFlow'
import { ChatMessage } from './entity/ChatMessage'
import { ChatflowPool } from './ChatflowPool'
import { ICommonObject } from 'flowise-components'
import { ICommonObject, INodeOptionsValue } from 'flowise-components'
import { fork } from 'child_process'
import { Tool } from './entity/Tool'
export class App {
app: express.Application
@ -90,7 +93,14 @@ export class App {
const basicAuthMiddleware = basicAuth({
users: { [username]: password }
})
const whitelistURLs = ['/api/v1/prediction/', '/api/v1/node-icon/', '/api/v1/chatflows-streaming']
const whitelistURLs = [
'/api/v1/verify/apikey/',
'/api/v1/chatflows/apikey/',
'/api/v1/public-chatflows',
'/api/v1/prediction/',
'/api/v1/node-icon/',
'/api/v1/chatflows-streaming'
]
this.app.use((req, res, next) => {
if (req.url.includes('/api/v1/')) {
whitelistURLs.some((url) => req.url.includes(url)) ? next() : basicAuthMiddleware(req, res, next)
@ -142,6 +152,29 @@ export class App {
}
})
// load async options
this.app.post('/api/v1/node-load-method/:name', async (req: Request, res: Response) => {
const nodeData: INodeData = req.body
if (Object.prototype.hasOwnProperty.call(this.nodesPool.componentNodes, req.params.name)) {
try {
const nodeInstance = this.nodesPool.componentNodes[req.params.name]
const methodName = nodeData.loadMethod || ''
const returnOptions: INodeOptionsValue[] = await nodeInstance.loadMethods![methodName]!.call(nodeInstance, nodeData, {
appDataSource: this.AppDataSource,
databaseEntities: databaseEntities
})
return res.json(returnOptions)
} catch (error) {
return res.json([])
}
} else {
res.status(404).send(`Node ${req.params.name} not found`)
return
}
})
// ----------------------------------------
// Chatflows
// ----------------------------------------
@ -152,6 +185,25 @@ export class App {
return res.json(chatflows)
})
// Get specific chatflow via api key
this.app.get('/api/v1/chatflows/apikey/:apiKey', async (req: Request, res: Response) => {
try {
const apiKey = await getApiKey(req.params.apiKey)
if (!apiKey) return res.status(401).send('Unauthorized')
const chatflows = await this.AppDataSource.getRepository(ChatFlow)
.createQueryBuilder('cf')
.where('cf.apikeyid = :apikeyid', { apikeyid: apiKey.id })
.orWhere('cf.apikeyid IS NULL')
.orWhere('cf.apikeyid = ""')
.orderBy('cf.name', 'ASC')
.getMany()
if (chatflows.length >= 1) return res.status(200).send(chatflows)
return res.status(404).send('Chatflow not found')
} catch (err: any) {
return res.status(500).send(err?.message)
}
})
// Get specific chatflow via id
this.app.get('/api/v1/chatflows/:id', async (req: Request, res: Response) => {
const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({
@ -161,6 +213,16 @@ export class App {
return res.status(404).send(`Chatflow ${req.params.id} not found`)
})
// Get specific chatflow via id (PUBLIC endpoint, used when sharing chatbot link)
this.app.get('/api/v1/public-chatflows/:id', async (req: Request, res: Response) => {
const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({
id: req.params.id
})
if (chatflow && chatflow.isPublic) return res.json(chatflow)
else if (chatflow && !chatflow.isPublic) return res.status(401).send(`Unauthorized`)
return res.status(404).send(`Chatflow ${req.params.id} not found`)
})
// Save chatflow
this.app.post('/api/v1/chatflows', async (req: Request, res: Response) => {
const body = req.body
@ -257,6 +319,63 @@ export class App {
return res.json(results)
})
// ----------------------------------------
// Tools
// ----------------------------------------
// Get all tools
this.app.get('/api/v1/tools', async (req: Request, res: Response) => {
const tools = await this.AppDataSource.getRepository(Tool).find()
return res.json(tools)
})
// Get specific tool
this.app.get('/api/v1/tools/:id', async (req: Request, res: Response) => {
const tool = await this.AppDataSource.getRepository(Tool).findOneBy({
id: req.params.id
})
return res.json(tool)
})
// Add tool
this.app.post('/api/v1/tools', async (req: Request, res: Response) => {
const body = req.body
const newTool = new Tool()
Object.assign(newTool, body)
const tool = this.AppDataSource.getRepository(Tool).create(newTool)
const results = await this.AppDataSource.getRepository(Tool).save(tool)
return res.json(results)
})
// Update tool
this.app.put('/api/v1/tools/:id', async (req: Request, res: Response) => {
const tool = await this.AppDataSource.getRepository(Tool).findOneBy({
id: req.params.id
})
if (!tool) {
res.status(404).send(`Tool ${req.params.id} not found`)
return
}
const body = req.body
const updateTool = new Tool()
Object.assign(updateTool, body)
this.AppDataSource.getRepository(Tool).merge(tool, updateTool)
const result = await this.AppDataSource.getRepository(Tool).save(tool)
return res.json(result)
})
// Delete tool
this.app.delete('/api/v1/tools/:id', async (req: Request, res: Response) => {
const results = await this.AppDataSource.getRepository(Tool).delete({ id: req.params.id })
return res.json(results)
})
// ----------------------------------------
// Configuration
// ----------------------------------------
@ -343,12 +462,12 @@ export class App {
// ----------------------------------------
// Get all chatflows for marketplaces
this.app.get('/api/v1/marketplaces', async (req: Request, res: Response) => {
const marketplaceDir = path.join(__dirname, '..', 'marketplaces')
this.app.get('/api/v1/marketplaces/chatflows', async (req: Request, res: Response) => {
const marketplaceDir = path.join(__dirname, '..', 'marketplaces', 'chatflows')
const jsonsInDir = fs.readdirSync(marketplaceDir).filter((file) => path.extname(file) === '.json')
const templates: any[] = []
jsonsInDir.forEach((file, index) => {
const filePath = path.join(__dirname, '..', 'marketplaces', file)
const filePath = path.join(__dirname, '..', 'marketplaces', 'chatflows', file)
const fileData = fs.readFileSync(filePath)
const fileDataObj = JSON.parse(fileData.toString())
const template = {
@ -362,6 +481,25 @@ export class App {
return res.json(templates)
})
// Get all tools for marketplaces
this.app.get('/api/v1/marketplaces/tools', async (req: Request, res: Response) => {
const marketplaceDir = path.join(__dirname, '..', 'marketplaces', 'tools')
const jsonsInDir = fs.readdirSync(marketplaceDir).filter((file) => path.extname(file) === '.json')
const templates: any[] = []
jsonsInDir.forEach((file, index) => {
const filePath = path.join(__dirname, '..', 'marketplaces', 'tools', file)
const fileData = fs.readFileSync(filePath)
const fileDataObj = JSON.parse(fileData.toString())
const template = {
...fileDataObj,
id: index,
templateName: file.split('.json')[0]
}
templates.push(template)
})
return res.json(templates)
})
// ----------------------------------------
// API Keys
// ----------------------------------------
@ -390,6 +528,17 @@ export class App {
return res.json(keys)
})
// Verify api key
this.app.get('/api/v1/verify/apikey/:apiKey', async (req: Request, res: Response) => {
try {
const apiKey = await getApiKey(req.params.apiKey)
if (!apiKey) return res.status(401).send('Unauthorized')
return res.status(200).send('OK')
} catch (err: any) {
return res.status(500).send(err?.message)
}
})
// ----------------------------------------
// Serve UI static
// ----------------------------------------
@ -623,6 +772,7 @@ export class App {
this.nodesPool.componentNodes,
incomingInput.question,
chatId,
this.AppDataSource,
incomingInput?.overrideConfig
)

View File

@ -15,10 +15,15 @@ import {
IOverrideConfig
} from '../Interface'
import { cloneDeep, get, omit, merge } from 'lodash'
import { ICommonObject, getInputVariables } from 'flowise-components'
import { ICommonObject, getInputVariables, IDatabaseEntity } from 'flowise-components'
import { scryptSync, randomBytes, timingSafeEqual } from 'crypto'
import { ChatFlow } from '../entity/ChatFlow'
import { ChatMessage } from '../entity/ChatMessage'
import { Tool } from '../entity/Tool'
import { DataSource } from 'typeorm'
const QUESTION_VAR_PREFIX = 'question'
export const databaseEntities: IDatabaseEntity = { ChatFlow: ChatFlow, ChatMessage: ChatMessage, Tool: Tool }
/**
* Returns the home folder path of the user if
@ -183,6 +188,7 @@ export const buildLangchain = async (
componentNodes: IComponentNodes,
question: string,
chatId: string,
appDataSource: DataSource,
overrideConfig?: ICommonObject
) => {
const flowNodes = cloneDeep(reactFlowNodes)
@ -215,7 +221,11 @@ export const buildLangchain = async (
if (overrideConfig) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig)
const reactFlowNodeData: INodeData = resolveVariables(flowNodeData, flowNodes, question)
flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question, { chatId })
flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question, {
chatId,
appDataSource,
databaseEntities
})
} catch (e: any) {
console.error(e)
throw new Error(e)
@ -453,7 +463,7 @@ export const isSameOverrideConfig = (
* @returns {string}
*/
export const getAPIKeyPath = (): string => {
return path.join(__dirname, '..', '..', 'api.json')
return process.env.APIKEY_PATH ? path.join(process.env.APIKEY_PATH, 'api.json') : path.join(__dirname, '..', '..', 'api.json')
}
/**
@ -537,6 +547,18 @@ export const addAPIKey = async (keyName: string): Promise<ICommonObject[]> => {
return content
}
/**
 * Look up the details of an existing API key.
 * @param {string} apiKey raw API key string to search for
 * @returns {Promise<ICommonObject | undefined>} the matching key record, or undefined when no key matches
 */
export const getApiKey = async (apiKey: string) => {
    const existingAPIKeys = await getAPIKeys()
    // Array.prototype.find returns undefined when nothing matches,
    // preserving the original findIndex < 0 behaviour.
    return existingAPIKeys.find((key) => key.apiKey === apiKey)
}
/**
* Update existing API key
* @param {string} keyIdToUpdate

View File

@ -0,0 +1,16 @@
// craco.config.js — overrides Create React App's webpack config without ejecting.
module.exports = {
    webpack: {
        configure: {
            module: {
                rules: [
                    {
                        // Applies to both .js and .mjs files
                        test: /\.m?js$/,
                        resolve: {
                            // Webpack 5 requires fully-specified (extension-included)
                            // import paths inside ESM packages; disabling this lets
                            // extensionless imports in dependencies resolve.
                            fullySpecified: false
                        }
                    }
                ]
            }
        }
    }
}

View File

@ -1,6 +1,6 @@
{
"name": "flowise-ui",
"version": "1.2.11",
"version": "1.2.13",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://flowiseai.com",
"author": {
@ -13,8 +13,12 @@
"@emotion/styled": "^11.10.6",
"@mui/icons-material": "^5.0.3",
"@mui/material": "^5.11.12",
"@mui/x-data-grid": "^6.8.0",
"@tabler/icons": "^1.39.1",
"clsx": "^1.1.1",
"flowise-embed": "*",
"flowise-embed-react": "*",
"flowise-react-json-view": "*",
"formik": "^2.2.6",
"framer-motion": "^4.1.13",
"history": "^5.0.0",
@ -26,10 +30,10 @@
"prop-types": "^15.7.2",
"react": "^18.2.0",
"react-code-blocks": "^0.0.9-0",
"react-color": "^2.19.3",
"react-datepicker": "^4.8.0",
"react-device-detect": "^1.17.0",
"react-dom": "^18.2.0",
"react-json-view": "^1.21.3",
"react-markdown": "^8.0.6",
"react-perfect-scrollbar": "^1.5.8",
"react-redux": "^8.0.5",
@ -46,11 +50,11 @@
"yup": "^0.32.9"
},
"scripts": {
"start": "react-scripts start",
"dev": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
"start": "craco start",
"dev": "craco start",
"build": "craco build",
"test": "craco test",
"eject": "craco eject"
},
"babel": {
"presets": [
@ -71,6 +75,7 @@
},
"devDependencies": {
"@babel/eslint-parser": "^7.15.8",
"@craco/craco": "^7.1.0",
"@testing-library/jest-dom": "^5.11.10",
"@testing-library/react": "^14.0.0",
"@testing-library/user-event": "^12.8.3",

View File

@ -1,13 +1,13 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>Flowise - LangchainJS UI</title>
<title>Flowise - Low-code LLM apps builder</title>
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<!-- Meta Tags-->
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#2296f3" />
<meta name="title" content="Flowise - LangchainJS UI" />
<meta name="title" content="Flowise - Low-code LLM apps builder" />
<meta name="description" content="Flowise helps you to build customized LLM flows and AI agents with a drag & drop UI" />
<meta name="keywords" content="react, material-ui, reactjs, workflow automation, LLM, langchain, chatbot, low-code" />
<meta name="author" content="CodedThemes" />
@ -17,13 +17,13 @@
<meta property="og:url" content="https://flowiseai.com/" />
<meta property="og:site_name" content="flowiseai.com" />
<meta property="article:publisher" content="https://www.facebook.com/codedthemes" />
<meta property="og:title" content="Flowise - LangchainJS UI" />
<meta property="og:title" content="Flowise - Low-code LLM apps builder" />
<meta property="og:description" content="Flowise helps you to better build LLM flows using Langchain in simple GUI" />
<meta property="og:image" content="https://flowiseai.com/og-image/og-facebook.png" />
<!-- Twitter -->
<meta property="twitter:card" content="summary_large_image" />
<meta property="twitter:url" content="https://flowiseai.com" />
<meta property="twitter:title" content="Flowise - LangchainJS UI" />
<meta property="twitter:title" content="Flowise - Low-code LLM apps builder" />
<meta property="twitter:description" content="Flowise helps you to better build LLM flows using Langchain in simple GUI" />
<meta property="twitter:image" content="https://flowiseai.com/og-image/og-twitter.png" />
<meta name="twitter:creator" content="@codedthemes" />

View File

@ -4,6 +4,8 @@ const getAllChatflows = () => client.get('/chatflows')
const getSpecificChatflow = (id) => client.get(`/chatflows/${id}`)
const getSpecificChatflowFromPublicEndpoint = (id) => client.get(`/public-chatflows/${id}`)
const createNewChatflow = (body) => client.post(`/chatflows`, body)
const updateChatflow = (id, body) => client.put(`/chatflows/${id}`, body)
@ -15,6 +17,7 @@ const getIsChatflowStreaming = (id) => client.get(`/chatflows-streaming/${id}`)
export default {
getAllChatflows,
getSpecificChatflow,
getSpecificChatflowFromPublicEndpoint,
createNewChatflow,
updateChatflow,
deleteChatflow,

View File

@ -1,7 +1,9 @@
import client from './client'
const getAllMarketplaces = () => client.get('/marketplaces')
const getAllChatflowsMarketplaces = () => client.get('/marketplaces/chatflows')
const getAllToolsMarketplaces = () => client.get('/marketplaces/tools')
export default {
getAllMarketplaces
getAllChatflowsMarketplaces,
getAllToolsMarketplaces
}

View File

@ -0,0 +1,19 @@
import client from './client'
// REST client for custom-tool CRUD endpoints (/tools).
// Each helper returns the axios promise from the shared client instance.
const toolsApi = {
    getAllTools: () => client.get('/tools'),
    getSpecificTool: (id) => client.get(`/tools/${id}`),
    createNewTool: (body) => client.post(`/tools`, body),
    updateTool: (id, body) => client.put(`/tools/${id}`, body),
    deleteTool: (id) => client.delete(`/tools/${id}`)
}

export default toolsApi

Binary file not shown.

After

Width:  |  Height:  |  Size: 17 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 7.8 KiB

View File

@ -1,8 +1,8 @@
// assets
import { IconHierarchy, IconBuildingStore, IconKey } from '@tabler/icons'
import { IconHierarchy, IconBuildingStore, IconKey, IconTool } from '@tabler/icons'
// constant
const icons = { IconHierarchy, IconBuildingStore, IconKey }
const icons = { IconHierarchy, IconBuildingStore, IconKey, IconTool }
// ==============================|| DASHBOARD MENU ITEMS ||============================== //
@ -27,6 +27,14 @@ const dashboard = {
icon: icons.IconBuildingStore,
breadcrumbs: true
},
{
id: 'tools',
title: 'Tools',
type: 'item',
url: '/tools',
icon: icons.IconTool,
breadcrumbs: true
},
{
id: 'apikey',
title: 'API Keys',

View File

@ -0,0 +1,23 @@
import { lazy } from 'react'
// project imports
import Loadable from 'ui-component/loading/Loadable'
import MinimalLayout from 'layout/MinimalLayout'
// canvas routing
const ChatbotFull = Loadable(lazy(() => import('views/chatbot')))
// ==============================|| CANVAS ROUTING ||============================== //
const ChatbotRoutes = {
path: '/',
element: <MinimalLayout />,
children: [
{
path: '/chatbot/:id',
element: <ChatbotFull />
}
]
}
export default ChatbotRoutes

View File

@ -13,6 +13,9 @@ const Marketplaces = Loadable(lazy(() => import('views/marketplaces')))
// apikey routing
const APIKey = Loadable(lazy(() => import('views/apikey')))
// apikey routing
const Tools = Loadable(lazy(() => import('views/tools')))
// ==============================|| MAIN ROUTING ||============================== //
const MainRoutes = {
@ -34,6 +37,10 @@ const MainRoutes = {
{
path: '/apikey',
element: <APIKey />
},
{
path: '/tools',
element: <Tools />
}
]
}

View File

@ -3,10 +3,11 @@ import { useRoutes } from 'react-router-dom'
// routes
import MainRoutes from './MainRoutes'
import CanvasRoutes from './CanvasRoutes'
import ChatbotRoutes from './ChatbotRoutes'
import config from 'config'
// ==============================|| ROUTING RENDER ||============================== //
export default function ThemeRoutes() {
return useRoutes([MainRoutes, CanvasRoutes], config.basename)
return useRoutes([MainRoutes, CanvasRoutes, ChatbotRoutes], config.basename)
}

View File

@ -136,6 +136,9 @@ export default function componentStyleOverrides(theme) {
'&::placeholder': {
color: theme.darkTextSecondary,
fontSize: '0.875rem'
},
'&.Mui-disabled': {
WebkitTextFillColor: theme?.customization?.isDarkMode ? theme.colors?.grey500 : theme.darkTextSecondary
}
}
}

View File

@ -1,8 +1,8 @@
import PropTypes from 'prop-types'
// material-ui
import { styled, useTheme } from '@mui/material/styles'
import { Box, Grid, Chip, Typography } from '@mui/material'
import { styled } from '@mui/material/styles'
import { Box, Grid, Typography } from '@mui/material'
// project imports
import MainCard from 'ui-component/cards/MainCard'
@ -28,19 +28,6 @@ const CardWrapper = styled(MainCard)(({ theme }) => ({
// ===========================|| CONTRACT CARD ||=========================== //
const ItemCard = ({ isLoading, data, images, onClick }) => {
const theme = useTheme()
const chipSX = {
height: 24,
padding: '0 6px'
}
const activeChatflowSX = {
...chipSX,
color: 'white',
backgroundColor: theme.palette.success.dark
}
return (
<>
{isLoading ? (
@ -49,11 +36,42 @@ const ItemCard = ({ isLoading, data, images, onClick }) => {
<CardWrapper border={false} content={false} onClick={onClick}>
<Box sx={{ p: 2.25 }}>
<Grid container direction='column'>
<div>
<div
style={{
display: 'flex',
flexDirection: 'row',
alignItems: 'center'
}}
>
{data.iconSrc && (
<div
style={{
width: 35,
height: 35,
marginRight: 10,
borderRadius: '50%',
background: `url(${data.iconSrc})`,
backgroundSize: 'contain',
backgroundRepeat: 'no-repeat',
backgroundPosition: 'center center'
}}
></div>
)}
{!data.iconSrc && data.color && (
<div
style={{
width: 35,
height: 35,
marginRight: 10,
borderRadius: '50%',
background: data.color
}}
></div>
)}
<Typography
sx={{ fontSize: '1.5rem', fontWeight: 500, overflowWrap: 'break-word', whiteSpace: 'pre-line' }}
>
{data.name}
{data.templateName || data.name}
</Typography>
</div>
{data.description && (
@ -61,13 +79,6 @@ const ItemCard = ({ isLoading, data, images, onClick }) => {
{data.description}
</span>
)}
<Grid sx={{ mt: 1, mb: 1 }} container direction='row'>
{data.deployed && (
<Grid item>
<Chip label='Deployed' sx={activeChatflowSX} />
</Grid>
)}
</Grid>
{images && (
<div
style={{

View File

@ -1,256 +0,0 @@
import { createPortal } from 'react-dom'
import { useState, useEffect } from 'react'
import { useSelector } from 'react-redux'
import PropTypes from 'prop-types'
import {
Button,
Dialog,
DialogActions,
DialogContent,
Box,
List,
ListItemButton,
ListItem,
ListItemAvatar,
ListItemText,
Typography,
Stack
} from '@mui/material'
import { useTheme } from '@mui/material/styles'
import PerfectScrollbar from 'react-perfect-scrollbar'
import { StyledButton } from 'ui-component/button/StyledButton'
import { DarkCodeEditor } from 'ui-component/editor/DarkCodeEditor'
import { LightCodeEditor } from 'ui-component/editor/LightCodeEditor'
import './EditPromptValuesDialog.css'
import { baseURL } from 'store/constant'
// Portal-rendered dialog for editing a node's string input (e.g. a prompt
// template). Shows a code editor on the left and — when the input accepts
// variables — a clickable list on the right of the user's question plus
// upstream nodes whose outputs can be injected as {{variable}} placeholders
// at the last recorded cursor position.
const EditPromptValuesDialog = ({ show, dialogProps, onCancel, onConfirm }) => {
    const portalElement = document.getElementById('portal')
    const theme = useTheme()
    const customization = useSelector((state) => state.customization)
    const languageType = 'json'

    const [inputValue, setInputValue] = useState('')
    const [inputParam, setInputParam] = useState(null)
    // Text on either side of the caret, captured on mouse-up/blur so a variable
    // can later be spliced in exactly where the user last placed the cursor.
    const [textCursorPosition, setTextCursorPosition] = useState({})

    useEffect(() => {
        if (dialogProps.value) setInputValue(dialogProps.value)
        if (dialogProps.inputParam) setInputParam(dialogProps.inputParam)

        // Reset local state when the dialog props change or the dialog unmounts
        return () => {
            setInputValue('')
            setInputParam(null)
            setTextCursorPosition({})
        }
    }, [dialogProps])

    // Record the text before/after the caret so a variable can be inserted
    // there later; cleared when there is no usable selection position.
    const onMouseUp = (e) => {
        if (e.target && e.target.selectionEnd && e.target.value) {
            const cursorPosition = e.target.selectionEnd
            const textBeforeCursorPosition = e.target.value.substring(0, cursorPosition)
            const textAfterCursorPosition = e.target.value.substring(cursorPosition, e.target.value.length)
            const body = {
                textBeforeCursorPosition,
                textAfterCursorPosition
            }
            setTextCursorPosition(body)
        } else {
            setTextCursorPosition({})
        }
    }

    // Insert a {{variable}} reference for the selected node (or the user's
    // question) at the recorded cursor position; appends to the end when no
    // cursor position has been captured yet.
    const onSelectOutputResponseClick = (node, isUserQuestion = false) => {
        let variablePath = isUserQuestion ? `question` : `${node.id}.data.instance`
        if (textCursorPosition) {
            let newInput = ''
            if (textCursorPosition.textBeforeCursorPosition === undefined && textCursorPosition.textAfterCursorPosition === undefined)
                newInput = `${inputValue}${`{{${variablePath}}}`}`
            else newInput = `${textCursorPosition.textBeforeCursorPosition}{{${variablePath}}}${textCursorPosition.textAfterCursorPosition}`
            setInputValue(newInput)
        }
    }

    const component = show ? (
        <Dialog open={show} fullWidth maxWidth='md' aria-labelledby='alert-dialog-title' aria-describedby='alert-dialog-description'>
            <DialogContent>
                <div style={{ display: 'flex', flexDirection: 'row' }}>
                    {/* Left pane: code editor, only for string-typed inputs */}
                    {inputParam && inputParam.type === 'string' && (
                        <div style={{ flex: 70 }}>
                            <Typography sx={{ mb: 2, ml: 1 }} variant='h4'>
                                {inputParam.label}
                            </Typography>
                            <PerfectScrollbar
                                style={{
                                    border: '1px solid',
                                    borderColor: theme.palette.grey['500'],
                                    borderRadius: '12px',
                                    height: '100%',
                                    maxHeight: 'calc(100vh - 220px)',
                                    overflowX: 'hidden',
                                    backgroundColor: 'white'
                                }}
                            >
                                {/* Theme-specific editor variants share the same props */}
                                {customization.isDarkMode ? (
                                    <DarkCodeEditor
                                        disabled={dialogProps.disabled}
                                        value={inputValue}
                                        onValueChange={(code) => setInputValue(code)}
                                        placeholder={inputParam.placeholder}
                                        type={languageType}
                                        onMouseUp={(e) => onMouseUp(e)}
                                        onBlur={(e) => onMouseUp(e)}
                                        style={{
                                            fontSize: '0.875rem',
                                            minHeight: 'calc(100vh - 220px)',
                                            width: '100%'
                                        }}
                                    />
                                ) : (
                                    <LightCodeEditor
                                        disabled={dialogProps.disabled}
                                        value={inputValue}
                                        onValueChange={(code) => setInputValue(code)}
                                        placeholder={inputParam.placeholder}
                                        type={languageType}
                                        onMouseUp={(e) => onMouseUp(e)}
                                        onBlur={(e) => onMouseUp(e)}
                                        style={{
                                            fontSize: '0.875rem',
                                            minHeight: 'calc(100vh - 220px)',
                                            width: '100%'
                                        }}
                                    />
                                )}
                            </PerfectScrollbar>
                        </div>
                    )}
                    {/* Right pane: variable picker, hidden in read-only mode */}
                    {!dialogProps.disabled && inputParam && inputParam.acceptVariable && (
                        <div style={{ flex: 30 }}>
                            <Stack flexDirection='row' sx={{ mb: 1, ml: 2 }}>
                                <Typography variant='h4'>Select Variable</Typography>
                            </Stack>
                            <PerfectScrollbar style={{ height: '100%', maxHeight: 'calc(100vh - 220px)', overflowX: 'hidden' }}>
                                <Box sx={{ pl: 2, pr: 2 }}>
                                    <List>
                                        {/* Fixed first entry: the user's chat question */}
                                        <ListItemButton
                                            sx={{
                                                p: 0,
                                                borderRadius: `${customization.borderRadius}px`,
                                                boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)',
                                                mb: 1
                                            }}
                                            disabled={dialogProps.disabled}
                                            onClick={() => onSelectOutputResponseClick(null, true)}
                                        >
                                            <ListItem alignItems='center'>
                                                <ListItemAvatar>
                                                    <div
                                                        style={{
                                                            width: 50,
                                                            height: 50,
                                                            borderRadius: '50%',
                                                            backgroundColor: 'white'
                                                        }}
                                                    >
                                                        <img
                                                            style={{
                                                                width: '100%',
                                                                height: '100%',
                                                                padding: 10,
                                                                objectFit: 'contain'
                                                            }}
                                                            alt='AI'
                                                            src='https://raw.githubusercontent.com/zahidkhawaja/langchain-chat-nextjs/main/public/parroticon.png'
                                                        />
                                                    </div>
                                                </ListItemAvatar>
                                                <ListItemText
                                                    sx={{ ml: 1 }}
                                                    primary='question'
                                                    secondary={`User's question from chatbox`}
                                                />
                                            </ListItem>
                                        </ListItemButton>
                                        {/* One entry per upstream node whose output can be referenced */}
                                        {dialogProps.availableNodesForVariable &&
                                            dialogProps.availableNodesForVariable.length > 0 &&
                                            dialogProps.availableNodesForVariable.map((node, index) => {
                                                // Anchor matching the node's currently selected output
                                                const selectedOutputAnchor = node.data.outputAnchors[0].options.find(
                                                    (ancr) => ancr.name === node.data.outputs['output']
                                                )
                                                return (
                                                    <ListItemButton
                                                        key={index}
                                                        sx={{
                                                            p: 0,
                                                            borderRadius: `${customization.borderRadius}px`,
                                                            boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)',
                                                            mb: 1
                                                        }}
                                                        disabled={dialogProps.disabled}
                                                        onClick={() => onSelectOutputResponseClick(node)}
                                                    >
                                                        <ListItem alignItems='center'>
                                                            <ListItemAvatar>
                                                                <div
                                                                    style={{
                                                                        width: 50,
                                                                        height: 50,
                                                                        borderRadius: '50%',
                                                                        backgroundColor: 'white'
                                                                    }}
                                                                >
                                                                    <img
                                                                        style={{
                                                                            width: '100%',
                                                                            height: '100%',
                                                                            padding: 10,
                                                                            objectFit: 'contain'
                                                                        }}
                                                                        alt={node.data.name}
                                                                        src={`${baseURL}/api/v1/node-icon/${node.data.name}`}
                                                                    />
                                                                </div>
                                                            </ListItemAvatar>
                                                            <ListItemText
                                                                sx={{ ml: 1 }}
                                                                primary={
                                                                    node.data.inputs.chainName ? node.data.inputs.chainName : node.data.id
                                                                }
                                                                secondary={`${selectedOutputAnchor?.label ?? 'output'} from ${
                                                                    node.data.label
                                                                }`}
                                                            />
                                                        </ListItem>
                                                    </ListItemButton>
                                                )
                                            })}
                                    </List>
                                </Box>
                            </PerfectScrollbar>
                        </div>
                    )}
                </div>
            </DialogContent>
            <DialogActions>
                <Button onClick={onCancel}>{dialogProps.cancelButtonName}</Button>
                <StyledButton disabled={dialogProps.disabled} variant='contained' onClick={() => onConfirm(inputValue, inputParam.name)}>
                    {dialogProps.confirmButtonName}
                </StyledButton>
            </DialogActions>
        </Dialog>
    ) : null

    // Render into the dedicated #portal element so the dialog escapes any
    // parent stacking/overflow context.
    return createPortal(component, portalElement)
}

EditPromptValuesDialog.propTypes = {
    show: PropTypes.bool,
    dialogProps: PropTypes.object,
    onCancel: PropTypes.func,
    onConfirm: PropTypes.func
}

export default EditPromptValuesDialog

Some files were not shown because too many files have changed in this diff Show More