Add feature to be able to chain prompt values

Henry 2023-04-16 23:17:08 +01:00
parent 0681a34408
commit 4b9c39cf54
25 changed files with 1496 additions and 246 deletions

View File

@ -11,6 +11,7 @@
],
"scripts": {
"build": "turbo run build",
"build-force": "turbo run build --force",
"dev": "turbo run dev --parallel",
"start": "run-script-os",
"start:windows": "cd packages/server/bin && run start",

View File

@ -1,4 +1,4 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
@ -13,6 +13,7 @@ class LLMChain_Chains implements INode {
baseClasses: string[]
description: string
inputs: INodeParams[]
outputs: INodeOutputsValue[]
constructor() {
this.label = 'LLM Chain'
@ -33,6 +34,13 @@ class LLMChain_Chains implements INode {
name: 'prompt',
type: 'BasePromptTemplate'
},
{
label: 'Chain Name',
name: 'chainName',
type: 'string',
placeholder: 'Task Creation Chain',
optional: true
},
{
label: 'Format Prompt Values',
name: 'promptValues',
@ -42,57 +50,99 @@ class LLMChain_Chains implements INode {
"input_language": "English",
"output_language": "French"
}`,
optional: true
optional: true,
acceptVariable: true,
list: true
}
]
this.outputs = [
{
label: this.label,
name: this.name,
type: this.type
},
{
label: 'Output Prediction',
name: 'outputPrediction',
type: 'string'
}
]
}
async init(nodeData: INodeData): Promise<any> {
async init(nodeData: INodeData, input: string): Promise<any> {
const model = nodeData.inputs?.model as BaseLanguageModel
const prompt = nodeData.inputs?.prompt as BasePromptTemplate
const output = nodeData.outputs?.output as string
const promptValuesStr = nodeData.inputs?.promptValues as string
const chain = new LLMChain({ llm: model, prompt })
return chain
if (output === this.name) {
const chain = new LLMChain({ llm: model, prompt })
return chain
} else if (output === 'outputPrediction') {
const chain = new LLMChain({ llm: model, prompt })
const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
const res = await runPrediction(inputVariables, chain, input, promptValuesStr)
// eslint-disable-next-line no-console
console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console
console.log(res)
return res
}
}
async run(nodeData: INodeData, input: string): Promise<string> {
const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"]
const chain = nodeData.instance as LLMChain
const promptValuesStr = nodeData.inputs?.promptValues as string
const res = await runPrediction(inputVariables, chain, input, promptValuesStr)
// eslint-disable-next-line no-console
console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console
console.log(res)
return res
}
}
if (inputVariables.length === 1) {
const res = await chain.run(input)
return res
} else if (inputVariables.length > 1) {
const promptValuesStr = nodeData.inputs?.promptValues as string
if (!promptValuesStr) throw new Error('Please provide Prompt Values')
const runPrediction = async (inputVariables: string[], chain: LLMChain, input: string, promptValuesStr: string) => {
if (inputVariables.length === 1) {
const res = await chain.run(input)
return res
} else if (inputVariables.length > 1) {
if (!promptValuesStr) throw new Error('Please provide Prompt Values')
const promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))
const promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))
let seen: string[] = []
let seen: string[] = []
for (const variable of inputVariables) {
seen.push(variable)
if (promptValues[variable]) {
seen.pop()
}
for (const variable of inputVariables) {
seen.push(variable)
if (promptValues[variable]) {
seen.pop()
}
if (seen.length === 1) {
const lastValue = seen.pop()
if (!lastValue) throw new Error('Please provide Prompt Values')
const options = {
...promptValues,
[lastValue]: input
}
const res = await chain.call(options)
return res?.text
} else {
throw new Error('Please provide Prompt Values')
}
} else {
const res = await chain.run(input)
return res
}
if (seen.length === 0) {
// All inputVariables have fixed values specified
const options = {
...promptValues
}
const res = await chain.call(options)
return res?.text
} else if (seen.length === 1) {
// If one inputVariable is not specified, use input (the user's question) as its value
const lastValue = seen.pop()
if (!lastValue) throw new Error('Please provide Prompt Values')
const options = {
...promptValues,
[lastValue]: input
}
const res = await chain.call(options)
return res?.text
} else {
throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`)
}
} else {
const res = await chain.run(input)
return res
}
}
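
A minimal standalone sketch of the prompt-value bookkeeping that runPrediction performs above, with the LangChain calls left out (the function name and example values are illustrative, not part of the commit):

const resolvePromptValues = (inputVariables: string[], promptValuesStr: string | undefined, input: string) => {
    // 0 or 1 input variables: the chain is run directly on the user's input (chain.run(input))
    if (inputVariables.length <= 1) return input

    if (!promptValuesStr) throw new Error('Please provide Prompt Values')
    const promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))

    // Input variables that have no fixed value in promptValues
    const missing = inputVariables.filter((variable) => !promptValues[variable])

    if (missing.length === 0) return { ...promptValues } // all values fixed: chain.call(promptValues)
    if (missing.length === 1) return { ...promptValues, [missing[0]]: input } // fill the one gap with the user's question
    throw new Error(`Please provide Prompt Values for: ${missing.join(', ')}`)
}

// resolvePromptValues(['objective', 'result'], '{"result":"done"}', 'Plan a trip')
// => { result: 'done', objective: 'Plan a trip' }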

View File

@ -2,18 +2,7 @@
* Types
*/
export type NodeParamsType =
| 'asyncOptions'
| 'options'
| 'string'
| 'number'
| 'boolean'
| 'password'
| 'json'
| 'code'
| 'date'
| 'file'
| 'folder'
export type NodeParamsType = 'options' | 'string' | 'number' | 'boolean' | 'password' | 'json' | 'code' | 'date' | 'file' | 'folder'
export type CommonType = string | number | boolean | undefined | null
@ -40,6 +29,13 @@ export interface INodeOptionsValue {
description?: string
}
export interface INodeOutputsValue {
label: string
name: string
type: string
description?: string
}
export interface INodeParams {
label: string
name: string
@ -50,6 +46,7 @@ export interface INodeParams {
optional?: boolean | INodeDisplay
rows?: number
list?: boolean
acceptVariable?: boolean
placeholder?: string
fileType?: string
}
@ -75,12 +72,15 @@ export interface INodeProperties {
export interface INode extends INodeProperties {
inputs?: INodeParams[]
getInstance?(nodeData: INodeData): Promise<string>
outputs?: INodeOutputsValue[]
init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<any>
run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<string>
}
export interface INodeData extends INodeProperties {
id: string
inputs?: ICommonObject
outputs?: ICommonObject
instance?: any
}
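
To make the new fields concrete, a few typed literals using values from this commit (the import path is illustrative): INode.outputs declares the anchors a node type offers, INodeData.outputs stores the anchor the user selected on the canvas, and acceptVariable marks an input that can hold {{...}} variables.

import { INodeOutputsValue, INodeParams } from './Interface' // illustrative path

// Anchors declared by a node type (what the UI turns into an "Output" options dropdown)
const declaredAnchors: INodeOutputsValue[] = [
    { label: 'LLM Chain', name: 'llmChain', type: 'LLMChain | BaseChain' },
    { label: 'Output Prediction', name: 'outputPrediction', type: 'string' }
]

// The user's selection as saved in the flow, i.e. what ends up in nodeData.outputs
const selectedOnCanvas = { output: 'outputPrediction' }

// An input flagged with acceptVariable gets its own input handle so other nodes can feed {{...}} values in
const promptValuesParam: INodeParams = {
    label: 'Format Prompt Values',
    name: 'promptValues',
    type: 'string',
    rows: 5,
    optional: true,
    acceptVariable: true,
    list: true
}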

View File

@ -3,7 +3,7 @@
"nodes": [
{
"width": 300,
"height": 360,
"height": 366,
"id": "promptTemplate_0",
"position": {
"x": 294.38456937448433,
@ -50,7 +50,7 @@
},
{
"width": 300,
"height": 886,
"height": 905,
"id": "fewShotPromptTemplate_0",
"position": {
"x": 719.2200337843097,
@ -223,11 +223,11 @@
},
{
"width": 300,
"height": 461,
"height": 592,
"id": "llmChain_0",
"position": {
"x": 1499.2654451385026,
"y": 356.3275374721362
"x": 1489.0277667172852,
"y": 357.461975349771
},
"type": "customNode",
"data": {
@ -239,13 +239,24 @@
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Task Creation Chain",
"optional": true,
"id": "llmChain_0-input-chainName-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
"optional": true,
"acceptVariable": true,
"list": true,
"id": "llmChain_0-input-promptValues-string"
}
],
"inputAnchors": [
@ -265,22 +276,40 @@
"inputs": {
"model": "{{openAI_0.data.instance}}",
"prompt": "{{fewShotPromptTemplate_0.data.instance}}",
"chainName": "",
"promptValues": ""
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "LLMChain | BaseChain"
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain"
},
{
"id": "llmChain_0-output-outputPrediction-string",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "llmChain"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1499.2654451385026,
"y": 356.3275374721362
"x": 1489.0277667172852,
"y": 357.461975349771
},
"dragging": false
}

View File

@ -0,0 +1,508 @@
{
"description": "Use output from a chain as prompt for another chain",
"nodes": [
{
"width": 300,
"height": 592,
"id": "llmChain_0",
"position": {
"x": 586.058087758348,
"y": 109.99914917840562
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Task Creation Chain",
"optional": true,
"id": "llmChain_0-input-chainName-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "llmChain_0-input-promptValues-string"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{openAI_0.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"chainName": "FirstChain",
"promptValues": "{\n \"objective\": \"{{question}}\"\n}"
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain"
},
{
"id": "llmChain_0-output-outputPrediction-string",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "outputPrediction"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 586.058087758348,
"y": 109.99914917840562
},
"dragging": false
},
{
"width": 300,
"height": 366,
"id": "promptTemplate_0",
"position": {
"x": 231.20329590069747,
"y": 313.54994365714185
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 5,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_0-input-template-string"
}
],
"inputAnchors": [],
"inputs": {
"template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:"
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 231.20329590069747,
"y": 313.54994365714185
},
"dragging": false
},
{
"width": 300,
"height": 592,
"id": "llmChain_1",
"position": {
"x": 1637.4327907249694,
"y": 127.71255193457947
},
"type": "customNode",
"data": {
"id": "llmChain_1",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Task Creation Chain",
"optional": true,
"id": "llmChain_1-input-chainName-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "llmChain_1-input-promptValues-string"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_1-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_1-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{openAI_0.data.instance}}",
"prompt": "{{promptTemplate_1.data.instance}}",
"chainName": "FinalChain",
"promptValues": "{\n \"objective\": \"{{question}}\",\n \"result\": \"{{llmChain_0.data.instance}}\"\n}"
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "llmChain_1-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain"
},
{
"id": "llmChain_1-output-outputPrediction-string",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "llmChain"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1637.4327907249694,
"y": 127.71255193457947
},
"dragging": false
},
{
"width": 300,
"height": 366,
"id": "promptTemplate_1",
"position": {
"x": 950.292796637893,
"y": 62.31864791878181
},
"type": "customNode",
"data": {
"id": "promptTemplate_1",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 5,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_1-input-template-string"
}
],
"inputAnchors": [],
"inputs": {
"template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array."
},
"outputAnchors": [
{
"id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 950.292796637893,
"y": 62.31864791878181
},
"dragging": false
},
{
"width": 300,
"height": 472,
"id": "openAI_0",
"position": {
"x": 225.7603660247592,
"y": -193.45016241085625
},
"type": "customNode",
"data": {
"id": "openAI_0",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password",
"id": "openAI_0-input-openAIApiKey-password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true,
"id": "openAI_0-input-modelName-options"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true,
"id": "openAI_0-input-temperature-number"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": "0"
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 225.7603660247592,
"y": -193.45016241085625
}
},
{
"width": 300,
"height": 472,
"id": "openAI_1",
"position": {
"x": 1275.7643968219816,
"y": -197.07668364123862
},
"type": "customNode",
"data": {
"id": "openAI_1",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password",
"id": "openAI_0-input-openAIApiKey-password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true,
"id": "openAI_0-input-modelName-options"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true,
"id": "openAI_0-input-temperature-number"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": "0"
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 1275.7643968219816,
"y": -197.07668364123862
}
}
],
"edges": [
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "promptTemplate_1",
"sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "llmChain_0",
"sourceHandle": "llmChain_0-output-outputPrediction-string",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-promptValues-string",
"type": "buttonedge",
"id": "llmChain_0-llmChain_0-output-outputPrediction-string-llmChain_1-llmChain_1-input-promptValues-string",
"data": {
"label": ""
}
},
{
"source": "openAI_1",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_1-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel",
"data": {
"label": ""
}
}
]
}
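
To make the wiring above concrete: at prediction time the server resolves llmChain_1's promptValues string before that chain runs, substituting the user's question and the first chain's Output Prediction. A hedged before/after illustration (the question text and intermediate result are made up):

// Stored in the flow (llmChain_1.inputs.promptValues):
const stored = '{\n  "objective": "{{question}}",\n  "result": "{{llmChain_0.data.instance}}"\n}'

// After resolveVariables runs (example values are hypothetical):
const resolved = '{\n  "objective": "Write a to-do app",\n  "result": "I would start by sketching the UI..."\n}'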

View File

@ -81,7 +81,7 @@
},
{
"width": 300,
"height": 360,
"height": 366,
"id": "promptTemplate_0",
"position": {
"x": 970.576876549135,
@ -128,11 +128,11 @@
},
{
"width": 300,
"height": 461,
"height": 592,
"id": "llmChain_0",
"position": {
"x": 1414.1175742139496,
"y": 340.4040954840462
"x": 1386.5063477084716,
"y": 211.47670100294192
},
"type": "customNode",
"data": {
@ -144,13 +144,24 @@
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Task Creation Chain",
"optional": true,
"id": "llmChain_0-input-chainName-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
"optional": true,
"acceptVariable": true,
"list": true,
"id": "llmChain_0-input-promptValues-string"
}
],
"inputAnchors": [
@ -170,38 +181,45 @@
"inputs": {
"model": "{{openAI_0.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"chainName": "CompanyName Chain",
"promptValues": ""
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "LLMChain | BaseChain"
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain"
},
{
"id": "llmChain_0-output-outputPrediction-string",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "llmChain"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1414.1175742139496,
"y": 340.4040954840462
"x": 1386.5063477084716,
"y": 211.47670100294192
},
"dragging": false
}
],
"edges": [
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
@ -212,6 +230,17 @@
"data": {
"label": ""
}
},
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
}
]
}

View File

@ -1,13 +1,14 @@
{
"description": "Language translation using LLM Chain with a Chat Prompt Template and Chat Model",
"nodes": [
{
"width": 300,
"height": 460,
"height": 473,
"id": "chatPromptTemplate_0",
"position": {
"x": 524,
"y": 237
"x": 906.3845860429262,
"y": 522.7223115041937
},
"type": "customNode",
"data": {
@ -52,8 +53,8 @@
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 524,
"y": 237
"x": 906.3845860429262,
"y": 522.7223115041937
}
},
{
@ -61,8 +62,8 @@
"height": 472,
"id": "chatOpenAI_0",
"position": {
"x": 855.1997276913991,
"y": 24.090553068402556
"x": 909.2168811101023,
"y": 10.159813502526418
},
"type": "customNode",
"data": {
@ -133,18 +134,18 @@
},
"selected": false,
"positionAbsolute": {
"x": 855.1997276913991,
"y": 24.090553068402556
"x": 909.2168811101023,
"y": 10.159813502526418
},
"dragging": false
},
{
"width": 300,
"height": 461,
"height": 592,
"id": "llmChain_0",
"position": {
"x": 1192.2235692202612,
"y": 361.71736677076257
"x": 1318.8661313433918,
"y": 323.51085023894643
},
"type": "customNode",
"data": {
@ -156,13 +157,24 @@
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Task Creation Chain",
"optional": true,
"id": "llmChain_0-input-chainName-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
"optional": true,
"acceptVariable": true,
"list": true,
"id": "llmChain_0-input-promptValues-string"
}
],
"inputAnchors": [
@ -182,22 +194,40 @@
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}",
"prompt": "{{chatPromptTemplate_0.data.instance}}",
"chainName": "",
"promptValues": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}"
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "LLMChain | BaseChain"
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain"
},
{
"id": "llmChain_0-output-outputPrediction-string",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "llmChain"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1192.2235692202612,
"y": 361.71736677076257
"x": 1318.8661313433918,
"y": 323.51085023894643
},
"dragging": false
}

View File

@ -1,9 +1,8 @@
import { INodeData } from 'flowise-components'
import { IActiveChatflows } from './Interface'
import { IActiveChatflows, INodeData } from './Interface'
/**
* This pool is to keep track of active test triggers (event listeners),
* so we can clear the event listeners whenever user refresh or exit page
* This pool keeps track of active chatflows,
* so we can avoid rebuilding the langchain flow every time
*/
export class ChatflowPool {
activeChatflows: IActiveChatflows = {}

View File

@ -1,4 +1,4 @@
import { INode, INodeData } from 'flowise-components'
import { INode, INodeData as INodeDataFromComponent, INodeParams } from 'flowise-components'
export type MessageType = 'apiMessage' | 'userMessage'
@ -38,6 +38,12 @@ export interface INodeDirectedGraph {
[key: string]: string[]
}
export interface INodeData extends INodeDataFromComponent {
inputAnchors: INodeParams[]
inputParams: INodeParams[]
outputAnchors: INodeParams[]
}
export interface IReactFlowNode {
id: string
position: {

View File

@ -4,15 +4,22 @@ import cors from 'cors'
import http from 'http'
import * as fs from 'fs'
import { IChatFlow, IncomingInput, IReactFlowNode, IReactFlowObject } from './Interface'
import { getNodeModulesPackagePath, getStartingNodes, buildLangchain, getEndingNode, constructGraphs } from './utils'
import { IChatFlow, IncomingInput, IReactFlowNode, IReactFlowObject, INodeData } from './Interface'
import {
getNodeModulesPackagePath,
getStartingNodes,
buildLangchain,
getEndingNode,
constructGraphs,
resolveVariables,
checkIfFlowNeedToRebuild
} from './utils'
import { cloneDeep } from 'lodash'
import { getDataSource } from './DataSource'
import { NodesPool } from './NodesPool'
import { ChatFlow } from './entity/ChatFlow'
import { ChatMessage } from './entity/ChatMessage'
import { ChatflowPool } from './ChatflowPool'
import { INodeData } from 'flowise-components'
export class App {
app: express.Application
@ -196,44 +203,61 @@ export class App {
let nodeToExecuteData: INodeData
const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({
id: chatflowid
})
if (!chatflow) return res.status(404).send(`Chatflow ${chatflowid} not found`)
const flowData = chatflow.flowData
const parsedFlowData: IReactFlowObject = JSON.parse(flowData)
const nodes = parsedFlowData.nodes
const edges = parsedFlowData.edges
// Check if node data exists in the pool and is still in sync, to avoid rebuilding the whole flow
if (
Object.prototype.hasOwnProperty.call(this.chatflowPool.activeChatflows, chatflowid) &&
this.chatflowPool.activeChatflows[chatflowid].inSync
this.chatflowPool.activeChatflows[chatflowid].inSync &&
!checkIfFlowNeedToRebuild(nodes, this.chatflowPool.activeChatflows[chatflowid].endingNodeData)
) {
nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData
} else {
const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({
id: chatflowid
})
if (!chatflow) return res.status(404).send(`Chatflow ${chatflowid} not found`)
const flowData = chatflow.flowData
const parsedFlowData: IReactFlowObject = JSON.parse(flowData)
/*** Get Ending Node with Directed Graph ***/
const { graph, nodeDependencies } = constructGraphs(parsedFlowData.nodes, parsedFlowData.edges)
const { graph, nodeDependencies } = constructGraphs(nodes, edges)
const directedGraph = graph
const endingNodeId = getEndingNode(nodeDependencies, directedGraph)
if (!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`)
const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data
if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`)
if (!Object.values(endingNodeData.outputs ?? {}).includes(endingNodeData.name)) {
return res
.status(500)
.send(
`Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction`
)
}
/*** Get Starting Nodes with Non-Directed Graph ***/
const constructedObj = constructGraphs(parsedFlowData.nodes, parsedFlowData.edges, true)
const constructedObj = constructGraphs(nodes, edges, true)
const nonDirectedGraph = constructedObj.graph
const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId)
/*** BFS to traverse from Starting Nodes to Ending Node ***/
const reactFlowNodes = await buildLangchain(
startingNodeIds,
parsedFlowData.nodes,
nodes,
graph,
depthQueue,
this.nodesPool.componentNodes
this.nodesPool.componentNodes,
incomingInput.question
)
const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId)
if (!nodeToExecute) return res.status(404).send(`Node ${endingNodeId} not found`)
nodeToExecuteData = nodeToExecute.data
const reactFlowNodeData: INodeData = resolveVariables(nodeToExecute.data, reactFlowNodes, incomingInput.question)
nodeToExecuteData = reactFlowNodeData
this.chatflowPool.add(chatflowid, nodeToExecuteData)
}
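
Because the hunk above interleaves removed and added lines, here is a simplified sketch of the prediction path after this change. Express plumbing and the error responses are omitted, getChatflowById is a hypothetical stand-in for the ChatFlow repository lookup, and chatflowPool / componentNodes stand for the App instance fields:

async function getNodeToExecuteData(chatflowid: string, question: string): Promise<INodeData> {
    const chatflow = await getChatflowById(chatflowid) // hypothetical helper for the repository call
    const { nodes, edges } = JSON.parse(chatflow.flowData) as IReactFlowObject

    // Reuse the built flow unless it is out of sync or depends on another chain's output
    const cached = chatflowPool.activeChatflows[chatflowid]
    if (cached && cached.inSync && !checkIfFlowNeedToRebuild(nodes, cached.endingNodeData)) {
        return cached.endingNodeData
    }

    const { graph, nodeDependencies } = constructGraphs(nodes, edges)
    const endingNodeId = getEndingNode(nodeDependencies, graph)! // the ending node must keep its own output selected, not Output Prediction
    const { startingNodeIds, depthQueue } = getStartingNodes(constructGraphs(nodes, edges, true).graph, endingNodeId)

    // The user's question is now threaded through the build so {{question}} can be resolved
    const builtNodes = await buildLangchain(startingNodeIds, nodes, graph, depthQueue, componentNodes, question)
    const nodeToExecute = builtNodes.find((node) => node.id === endingNodeId)!
    const nodeToExecuteData = resolveVariables(nodeToExecute.data, builtNodes, question)
    chatflowPool.add(chatflowid, nodeToExecuteData)
    return nodeToExecuteData
}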

View File

@ -8,10 +8,14 @@ import {
INodeDirectedGraph,
INodeQueue,
IReactFlowEdge,
IReactFlowNode
IReactFlowNode,
IVariableDict,
INodeData
} from '../Interface'
import { cloneDeep, get } from 'lodash'
import { ICommonObject, INodeData } from 'flowise-components'
import { ICommonObject } from 'flowise-components'
const QUESTION_VAR_PREFIX = 'question'
/**
* Returns the home folder path of the user if
@ -166,13 +170,15 @@ export const getEndingNode = (nodeDependencies: INodeDependencies, graph: INodeD
* @param {INodeDirectedGraph} graph
* @param {IDepthQueue} depthQueue
* @param {IComponentNodes} componentNodes
* @param {string} question
*/
export const buildLangchain = async (
startingNodeIds: string[],
reactFlowNodes: IReactFlowNode[],
graph: INodeDirectedGraph,
depthQueue: IDepthQueue,
componentNodes: IComponentNodes
componentNodes: IComponentNodes,
question: string
) => {
const flowNodes = cloneDeep(reactFlowNodes)
@ -200,9 +206,9 @@ export const buildLangchain = async (
const nodeModule = await import(nodeInstanceFilePath)
const newNodeInstance = new nodeModule.nodeClass()
const reactFlowNodeData: INodeData = resolveVariables(reactFlowNode.data, flowNodes)
const reactFlowNodeData: INodeData = resolveVariables(reactFlowNode.data, flowNodes, question)
flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData)
flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question)
} catch (e: any) {
console.error(e)
throw new Error(e)
@ -247,11 +253,14 @@ export const buildLangchain = async (
* Get variable value from outputResponses.output
* @param {string} paramValue
* @param {IReactFlowNode[]} reactFlowNodes
* @param {string} question
* @param {boolean} isAcceptVariable
* @returns {string}
*/
export const getVariableValue = (paramValue: string, reactFlowNodes: IReactFlowNode[]) => {
export const getVariableValue = (paramValue: string, reactFlowNodes: IReactFlowNode[], question: string, isAcceptVariable = false) => {
let returnVal = paramValue
const variableStack = []
const variableDict = {} as IVariableDict
let startIdx = 0
const endIdx = returnVal.length - 1
@ -269,17 +278,36 @@ export const getVariableValue = (paramValue: string, reactFlowNodes: IReactFlowN
const variableEndIdx = startIdx
const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx)
if (isAcceptVariable && variableFullPath === QUESTION_VAR_PREFIX) {
variableDict[`{{${variableFullPath}}}`] = question
}
// Split by first occurrence of '.' to get just nodeId
const [variableNodeId, _] = variableFullPath.split('.')
const executedNode = reactFlowNodes.find((nd) => nd.id === variableNodeId)
if (executedNode) {
const variableInstance = get(executedNode.data, 'instance')
returnVal = variableInstance
const variableValue = get(executedNode.data, 'instance')
if (isAcceptVariable) {
variableDict[`{{${variableFullPath}}}`] = variableValue
} else {
returnVal = variableValue
}
}
variableStack.pop()
}
startIdx += 1
}
if (isAcceptVariable) {
const variablePaths = Object.keys(variableDict)
variablePaths.sort() // Sort by variable path length because a longer path could contain a nested variable
variablePaths.forEach((path) => {
const variableValue = variableDict[path]
// Replace all occurrences
returnVal = returnVal.split(path).join(variableValue)
})
return returnVal
}
return returnVal
}
@ -287,25 +315,26 @@ export const getVariableValue = (paramValue: string, reactFlowNodes: IReactFlowN
* Loop through each input and resolve variables if necessary
* @param {INodeData} reactFlowNodeData
* @param {IReactFlowNode[]} reactFlowNodes
* @param {string} question
* @returns {INodeData}
*/
export const resolveVariables = (reactFlowNodeData: INodeData, reactFlowNodes: IReactFlowNode[]): INodeData => {
export const resolveVariables = (reactFlowNodeData: INodeData, reactFlowNodes: IReactFlowNode[], question: string): INodeData => {
const flowNodeData = cloneDeep(reactFlowNodeData)
const types = 'inputs'
const getParamValues = (paramsObj: ICommonObject) => {
for (const key in paramsObj) {
const paramValue: string = paramsObj[key]
if (Array.isArray(paramValue)) {
const resolvedInstances = []
for (const param of paramValue) {
const resolvedInstance = getVariableValue(param, reactFlowNodes)
const resolvedInstance = getVariableValue(param, reactFlowNodes, question)
resolvedInstances.push(resolvedInstance)
}
paramsObj[key] = resolvedInstances
} else {
const resolvedInstance = getVariableValue(paramValue, reactFlowNodes)
const isAcceptVariable = reactFlowNodeData.inputParams.find((param) => param.name === key)?.acceptVariable ?? false
const resolvedInstance = getVariableValue(paramValue, reactFlowNodes, question, isAcceptVariable)
paramsObj[key] = resolvedInstance
}
}
@ -317,3 +346,24 @@ export const resolveVariables = (reactFlowNodeData: INodeData, reactFlowNodes: I
return flowNodeData
}
/**
* Rebuild the flow if the LLMChain depends on other chains
* User Question => Prompt_0 => LLMChain_0 => Prompt_1 => LLMChain_1
* @param {IReactFlowNode[]} nodes
* @param {INodeData} nodeData
* @returns {boolean}
*/
export const checkIfFlowNeedToRebuild = (nodes: IReactFlowNode[], nodeData: INodeData) => {
if (nodeData.name !== 'llmChain') return false
const node = nodes.find((nd) => nd.id === nodeData.id)
if (!node) throw new Error(`Node ${nodeData.id} not found`)
const inputs = node.data.inputs
for (const key in inputs) {
const isInputAcceptVariable = node.data.inputParams.find((param) => param.name === key)?.acceptVariable || false
if (isInputAcceptVariable && inputs[key].includes('{{') && inputs[key].includes('}}')) return true
}
return false
}
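
A condensed sketch of what the acceptVariable branch of getVariableValue achieves. The real implementation scans for '{{' / '}}' pairs with a stack and builds variableDict; the version below uses a regex purely to keep the illustration short, and the function name is ours:

const resolveAcceptVariableValue = (paramValue: string, question: string, nodeInstances: Record<string, string>): string => {
    return paramValue.replace(/{{(.*?)}}/g, (match, variableFullPath: string) => {
        if (variableFullPath === 'question') return question // QUESTION_VAR_PREFIX
        const [variableNodeId] = variableFullPath.split('.') // e.g. 'llmChain_0' from 'llmChain_0.data.instance'
        return nodeInstances[variableNodeId] ?? match // the executed node's data.instance, if found
    })
}

// resolveAcceptVariableValue('{ "objective": "{{question}}", "result": "{{llmChain_0.data.instance}}" }',
//                            'Write a to-do app', { llmChain_0: 'Sketch the UI first...' })
// => '{ "objective": "Write a to-do app", "result": "Sketch the UI first..." }'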

View File

@ -1,9 +1,12 @@
import { createContext, useState } from 'react'
import PropTypes from 'prop-types'
import { getUniqueNodeId } from 'utils/genericHelper'
import { cloneDeep } from 'lodash'
const initialValue = {
reactFlowInstance: null,
setReactFlowInstance: () => {},
duplicateNode: () => {},
deleteNode: () => {},
deleteEdge: () => {}
}
@ -22,13 +25,53 @@ export const ReactFlowContext = ({ children }) => {
reactFlowInstance.setEdges(reactFlowInstance.getEdges().filter((edge) => edge.id !== id))
}
const duplicateNode = (id) => {
const nodes = reactFlowInstance.getNodes()
const originalNode = nodes.find((n) => n.id === id)
if (originalNode) {
const newNodeId = getUniqueNodeId(originalNode.data, nodes)
const clonedNode = cloneDeep(originalNode)
const duplicatedNode = {
...clonedNode,
id: newNodeId,
position: {
x: clonedNode.position.x + 400,
y: clonedNode.position.y
},
positionAbsolute: {
x: clonedNode.positionAbsolute.x + 400,
y: clonedNode.positionAbsolute.y
},
data: {
...clonedNode.data,
id: newNodeId
},
selected: false
}
const dataKeys = ['inputParams', 'inputAnchors', 'outputAnchors']
for (const key of dataKeys) {
for (const item of duplicatedNode.data[key]) {
if (item.id) {
item.id = item.id.replace(id, newNodeId)
}
}
}
reactFlowInstance.setNodes([...nodes, duplicatedNode])
}
}
return (
<flowContext.Provider
value={{
reactFlowInstance,
setReactFlowInstance,
deleteNode,
deleteEdge
deleteEdge,
duplicateNode
}}
>
{children}
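
The per-anchor id rewrite is the detail that matters here; without it, the copy's handles would still carry the original node's id. For example (illustrative ids):

// duplicateNode swaps the old node id for the new one inside every anchor id:
'llmChain_0-input-promptValues-string'.replace('llmChain_0', 'llmChain_1')
// => 'llmChain_1-input-promptValues-string'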

View File

@ -0,0 +1,6 @@
.editor__textarea {
outline: 0;
}
.editor__textarea::placeholder {
color: rgba(120, 120, 120, 0.5);
}

View File

@ -0,0 +1,256 @@
import { createPortal } from 'react-dom'
import { useState, useEffect } from 'react'
import { useSelector } from 'react-redux'
import PropTypes from 'prop-types'
import {
Button,
Dialog,
DialogActions,
DialogContent,
Box,
List,
ListItemButton,
ListItem,
ListItemAvatar,
ListItemText,
Typography,
Stack
} from '@mui/material'
import { useTheme } from '@mui/material/styles'
import PerfectScrollbar from 'react-perfect-scrollbar'
import { StyledButton } from 'ui-component/button/StyledButton'
import { DarkCodeEditor } from 'ui-component/editor/DarkCodeEditor'
import { LightCodeEditor } from 'ui-component/editor/LightCodeEditor'
import './EditPromptValuesDialog.css'
import { baseURL } from 'store/constant'
const EditPromptValuesDialog = ({ show, dialogProps, onCancel, onConfirm }) => {
const portalElement = document.getElementById('portal')
const theme = useTheme()
const customization = useSelector((state) => state.customization)
const languageType = 'json'
const [inputValue, setInputValue] = useState('')
const [inputParam, setInputParam] = useState(null)
const [textCursorPosition, setTextCursorPosition] = useState({})
useEffect(() => {
if (dialogProps.value) setInputValue(dialogProps.value)
if (dialogProps.inputParam) setInputParam(dialogProps.inputParam)
return () => {
setInputValue('')
setInputParam(null)
setTextCursorPosition({})
}
}, [dialogProps])
const onMouseUp = (e) => {
if (e.target && e.target.selectionEnd && e.target.value) {
const cursorPosition = e.target.selectionEnd
const textBeforeCursorPosition = e.target.value.substring(0, cursorPosition)
const textAfterCursorPosition = e.target.value.substring(cursorPosition, e.target.value.length)
const body = {
textBeforeCursorPosition,
textAfterCursorPosition
}
setTextCursorPosition(body)
} else {
setTextCursorPosition({})
}
}
const onSelectOutputResponseClick = (node, isUserQuestion = false) => {
let variablePath = isUserQuestion ? `question` : `${node.id}.data.instance`
if (textCursorPosition) {
let newInput = ''
if (textCursorPosition.textBeforeCursorPosition === undefined && textCursorPosition.textAfterCursorPosition === undefined)
newInput = `${inputValue}${`{{${variablePath}}}`}`
else newInput = `${textCursorPosition.textBeforeCursorPosition}{{${variablePath}}}${textCursorPosition.textAfterCursorPosition}`
setInputValue(newInput)
}
}
const component = show ? (
<Dialog open={show} fullWidth maxWidth='md' aria-labelledby='alert-dialog-title' aria-describedby='alert-dialog-description'>
<DialogContent>
<div style={{ display: 'flex', flexDirection: 'row' }}>
{inputParam && inputParam.type === 'string' && (
<div style={{ flex: 70 }}>
<Typography sx={{ mb: 2, ml: 1 }} variant='h4'>
{inputParam.label}
</Typography>
<PerfectScrollbar
style={{
border: '1px solid',
borderColor: theme.palette.grey['500'],
borderRadius: '12px',
height: '100%',
maxHeight: 'calc(100vh - 220px)',
overflowX: 'hidden',
backgroundColor: 'white'
}}
>
{customization.isDarkMode ? (
<DarkCodeEditor
disabled={dialogProps.disabled}
value={inputValue}
onValueChange={(code) => setInputValue(code)}
placeholder={inputParam.placeholder}
type={languageType}
onMouseUp={(e) => onMouseUp(e)}
onBlur={(e) => onMouseUp(e)}
style={{
fontSize: '0.875rem',
minHeight: 'calc(100vh - 220px)',
width: '100%'
}}
/>
) : (
<LightCodeEditor
disabled={dialogProps.disabled}
value={inputValue}
onValueChange={(code) => setInputValue(code)}
placeholder={inputParam.placeholder}
type={languageType}
onMouseUp={(e) => onMouseUp(e)}
onBlur={(e) => onMouseUp(e)}
style={{
fontSize: '0.875rem',
minHeight: 'calc(100vh - 220px)',
width: '100%'
}}
/>
)}
</PerfectScrollbar>
</div>
)}
{!dialogProps.disabled && inputParam && inputParam.acceptVariable && (
<div style={{ flex: 30 }}>
<Stack flexDirection='row' sx={{ mb: 1, ml: 2 }}>
<Typography variant='h4'>Select Variable</Typography>
</Stack>
<PerfectScrollbar style={{ height: '100%', maxHeight: 'calc(100vh - 220px)', overflowX: 'hidden' }}>
<Box sx={{ pl: 2, pr: 2 }}>
<List>
<ListItemButton
sx={{
p: 0,
borderRadius: `${customization.borderRadius}px`,
boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)',
mb: 1
}}
disabled={dialogProps.disabled}
onClick={() => onSelectOutputResponseClick(null, true)}
>
<ListItem alignItems='center'>
<ListItemAvatar>
<div
style={{
width: 50,
height: 50,
borderRadius: '50%',
backgroundColor: 'white'
}}
>
<img
style={{
width: '100%',
height: '100%',
padding: 10,
objectFit: 'contain'
}}
alt='AI'
src='https://raw.githubusercontent.com/zahidkhawaja/langchain-chat-nextjs/main/public/parroticon.png'
/>
</div>
</ListItemAvatar>
<ListItemText
sx={{ ml: 1 }}
primary='question'
secondary={`User's question from chatbox`}
/>
</ListItem>
</ListItemButton>
{dialogProps.availableNodesForVariable &&
dialogProps.availableNodesForVariable.length > 0 &&
dialogProps.availableNodesForVariable.map((node, index) => {
const selectedOutputAnchor = node.data.outputAnchors[0].options.find(
(ancr) => ancr.name === node.data.outputs['output']
)
return (
<ListItemButton
key={index}
sx={{
p: 0,
borderRadius: `${customization.borderRadius}px`,
boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)',
mb: 1
}}
disabled={dialogProps.disabled}
onClick={() => onSelectOutputResponseClick(node)}
>
<ListItem alignItems='center'>
<ListItemAvatar>
<div
style={{
width: 50,
height: 50,
borderRadius: '50%',
backgroundColor: 'white'
}}
>
<img
style={{
width: '100%',
height: '100%',
padding: 10,
objectFit: 'contain'
}}
alt={node.data.name}
src={`${baseURL}/api/v1/node-icon/${node.data.name}`}
/>
</div>
</ListItemAvatar>
<ListItemText
sx={{ ml: 1 }}
primary={
node.data.inputs.chainName ? node.data.inputs.chainName : node.data.id
}
secondary={`${selectedOutputAnchor?.label ?? 'output'} from ${
node.data.label
}`}
/>
</ListItem>
</ListItemButton>
)
})}
</List>
</Box>
</PerfectScrollbar>
</div>
)}
</div>
</DialogContent>
<DialogActions>
<Button onClick={onCancel}>{dialogProps.cancelButtonName}</Button>
<StyledButton disabled={dialogProps.disabled} variant='contained' onClick={() => onConfirm(inputValue, inputParam.name)}>
{dialogProps.confirmButtonName}
</StyledButton>
</DialogActions>
</Dialog>
) : null
return createPortal(component, portalElement)
}
EditPromptValuesDialog.propTypes = {
show: PropTypes.bool,
dialogProps: PropTypes.object,
onCancel: PropTypes.func,
onConfirm: PropTypes.func
}
export default EditPromptValuesDialog
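
The variable insertion itself is a plain string splice around the cursor position recorded by onMouseUp; a minimal sketch (the helper name is ours):

const spliceVariable = (textBefore: string, textAfter: string, variablePath: string): string =>
    `${textBefore}{{${variablePath}}}${textAfter}`

// spliceVariable('{\n  "objective": "', '"\n}', 'question')
// => '{\n  "objective": "{{question}}"\n}'
// For a chain output the path is `${node.id}.data.instance`, e.g. 'llmChain_0.data.instance'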

View File

@ -18,7 +18,7 @@ const StyledPopper = styled(Popper)({
}
})
export const Dropdown = ({ name, value, options, onSelect, disabled = false }) => {
export const Dropdown = ({ name, value, options, onSelect, disabled = false, disableClearable = false }) => {
const customization = useSelector((state) => state.customization)
const findMatchingOptions = (options = [], value) => options.find((option) => option.name === value)
const getDefaultOptionValue = () => ''
@ -29,6 +29,7 @@ export const Dropdown = ({ name, value, options, onSelect, disabled = false }) =
<Autocomplete
id={name}
disabled={disabled}
disableClearable={disableClearable}
size='small'
options={options || []}
value={findMatchingOptions(options, internalValue) || getDefaultOptionValue()}
@ -59,5 +60,6 @@ Dropdown.propTypes = {
value: PropTypes.string,
options: PropTypes.array,
onSelect: PropTypes.func,
disabled: PropTypes.bool
disabled: PropTypes.bool,
disableClearable: PropTypes.bool
}

View File

@ -8,11 +8,12 @@ import './prism-dark.css'
import PropTypes from 'prop-types'
import { useTheme } from '@mui/material/styles'
export const DarkCodeEditor = ({ value, placeholder, type, style, onValueChange, onMouseUp, onBlur }) => {
export const DarkCodeEditor = ({ value, placeholder, disabled = false, type, style, onValueChange, onMouseUp, onBlur }) => {
const theme = useTheme()
return (
<Editor
disabled={disabled}
value={value}
placeholder={placeholder}
highlight={(code) => highlight(code, type === 'json' ? languages.json : languages.js)}
@ -32,6 +33,7 @@ export const DarkCodeEditor = ({ value, placeholder, type, style, onValueChange,
DarkCodeEditor.propTypes = {
value: PropTypes.string,
placeholder: PropTypes.string,
disabled: PropTypes.bool,
type: PropTypes.string,
style: PropTypes.object,
onValueChange: PropTypes.func,

View File

@ -8,11 +8,12 @@ import './prism-light.css'
import PropTypes from 'prop-types'
import { useTheme } from '@mui/material/styles'
export const LightCodeEditor = ({ value, placeholder, type, style, onValueChange, onMouseUp, onBlur }) => {
export const LightCodeEditor = ({ value, placeholder, disabled = false, type, style, onValueChange, onMouseUp, onBlur }) => {
const theme = useTheme()
return (
<Editor
disabled={disabled}
value={value}
placeholder={placeholder}
highlight={(code) => highlight(code, type === 'json' ? languages.json : languages.js)}
@ -32,6 +33,7 @@ export const LightCodeEditor = ({ value, placeholder, type, style, onValueChange
LightCodeEditor.propTypes = {
value: PropTypes.string,
placeholder: PropTypes.string,
disabled: PropTypes.bool,
type: PropTypes.string,
style: PropTypes.object,
onValueChange: PropTypes.func,

View File

@ -1,28 +1,53 @@
import { useState } from 'react'
import PropTypes from 'prop-types'
import { FormControl, OutlinedInput } from '@mui/material'
import EditPromptValuesDialog from 'ui-component/dialog/EditPromptValuesDialog'
export const Input = ({ inputParam, value, onChange, disabled = false }) => {
export const Input = ({ inputParam, value, onChange, disabled = false, showDialog, dialogProps, onDialogCancel, onDialogConfirm }) => {
const [myValue, setMyValue] = useState(value ?? '')
const getInputType = (type) => {
switch (type) {
case 'string':
return 'text'
case 'password':
return 'password'
case 'number':
return 'number'
default:
return 'text'
}
}
return (
<FormControl sx={{ mt: 1, width: '100%' }} size='small'>
<OutlinedInput
id={inputParam.name}
size='small'
disabled={disabled}
type={inputParam.type === 'string' ? 'text' : inputParam.type}
placeholder={inputParam.placeholder}
multiline={!!inputParam.rows}
maxRows={inputParam.rows || 0}
minRows={inputParam.rows || 0}
value={myValue}
name={inputParam.name}
onChange={(e) => {
setMyValue(e.target.value)
onChange(e.target.value)
<>
<FormControl sx={{ mt: 1, width: '100%' }} size='small'>
<OutlinedInput
id={inputParam.name}
size='small'
disabled={disabled}
type={getInputType(inputParam.type)}
placeholder={inputParam.placeholder}
multiline={!!inputParam.rows}
rows={inputParam.rows ?? 1}
value={myValue}
name={inputParam.name}
onChange={(e) => {
setMyValue(e.target.value)
onChange(e.target.value)
}}
/>
</FormControl>
<EditPromptValuesDialog
show={showDialog}
dialogProps={dialogProps}
onCancel={onDialogCancel}
onConfirm={(newValue, inputParamName) => {
setMyValue(newValue)
onDialogConfirm(newValue, inputParamName)
}}
/>
</FormControl>
></EditPromptValuesDialog>
</>
)
}
@ -30,5 +55,9 @@ Input.propTypes = {
inputParam: PropTypes.object,
value: PropTypes.string,
onChange: PropTypes.func,
disabled: PropTypes.bool
disabled: PropTypes.bool,
showDialog: PropTypes.bool,
dialogProps: PropTypes.object,
onDialogCancel: PropTypes.func,
onDialogConfirm: PropTypes.func
}

View File

@ -9,13 +9,9 @@ export const TooltipWithParser = ({ title }) => {
return (
<Tooltip title={parser(title)} placement='right'>
<div style={{ display: 'flex', alignItems: 'center' }}>
<IconButton sx={{ height: 25, width: 25 }}>
<Info
style={{ background: 'transparent', color: customization.isDarkMode ? 'white' : 'inherit', height: 18, width: 18 }}
/>
</IconButton>
</div>
<IconButton sx={{ height: 25, width: 25 }}>
<Info style={{ background: 'transparent', color: customization.isDarkMode ? 'white' : 'inherit', height: 18, width: 18 }} />
</IconButton>
</Tooltip>
)
}

View File

@ -22,23 +22,12 @@ export const getUniqueNodeId = (nodeData, nodes) => {
return nodeId
}
export const initializeNodeData = (nodeParams) => {
export const initializeDefaultNodeData = (nodeParams) => {
const initialValues = {}
for (let i = 0; i < nodeParams.length; i += 1) {
const input = nodeParams[i]
// Load from nodeParams default values
initialValues[input.name] = input.default || ''
// Special case for array, always initialize the item if default is not set
if (input.type === 'array' && !input.default) {
const newObj = {}
for (let j = 0; j < input.array.length; j += 1) {
newObj[input.array[j].name] = input.array[j].default || ''
}
initialValues[input.name] = [newObj]
}
}
return initialValues
@ -46,62 +35,118 @@ export const initializeNodeData = (nodeParams) => {
export const initNode = (nodeData, newNodeId) => {
const inputAnchors = []
const inputParams = []
const incoming = nodeData.inputs ? nodeData.inputs.length : 0
const outgoing = 1
const whitelistTypes = ['asyncOptions', 'options', 'string', 'number', 'boolean', 'password', 'json', 'code', 'date', 'file', 'folder']
const whitelistTypes = ['options', 'string', 'number', 'boolean', 'password', 'json', 'code', 'date', 'file', 'folder']
for (let i = 0; i < incoming; i += 1) {
if (whitelistTypes.includes(nodeData.inputs[i].type)) continue
const newInput = {
...nodeData.inputs[i],
id: `${newNodeId}-input-${nodeData.inputs[i].name}-${nodeData.inputs[i].type}`
}
inputAnchors.push(newInput)
if (whitelistTypes.includes(nodeData.inputs[i].type)) {
inputParams.push(newInput)
} else {
inputAnchors.push(newInput)
}
}
const outputAnchors = []
for (let i = 0; i < outgoing; i += 1) {
const newOutput = {
id: `${newNodeId}-output-${nodeData.name}-${nodeData.baseClasses.join('|')}`,
name: nodeData.name,
label: nodeData.type,
type: nodeData.baseClasses.join(' | ')
if (nodeData.outputs && nodeData.outputs.length) {
const options = []
for (let j = 0; j < nodeData.outputs.length; j += 1) {
let baseClasses = ''
let type = ''
if (whitelistTypes.includes(nodeData.outputs[j].type)) {
baseClasses = nodeData.outputs[j].type
type = nodeData.outputs[j].type
} else {
baseClasses = nodeData.baseClasses.join('|')
type = nodeData.baseClasses.join(' | ')
}
const newOutputOption = {
id: `${newNodeId}-output-${nodeData.outputs[j].name}-${baseClasses}`,
name: nodeData.outputs[j].name,
label: nodeData.outputs[j].label,
type
}
options.push(newOutputOption)
}
const newOutput = {
name: 'output',
label: 'Output',
type: 'options',
options,
default: nodeData.outputs[0].name
}
outputAnchors.push(newOutput)
} else {
const newOutput = {
id: `${newNodeId}-output-${nodeData.name}-${nodeData.baseClasses.join('|')}`,
name: nodeData.name,
label: nodeData.type,
type: nodeData.baseClasses.join(' | ')
}
outputAnchors.push(newOutput)
}
outputAnchors.push(newOutput)
}
nodeData.id = newNodeId
nodeData.inputAnchors = inputAnchors
nodeData.outputAnchors = outputAnchors
/*
Initial inputs = [
/* Initial
inputs = [
{
label: 'field_label',
name: 'field'
label: 'field_label_1',
name: 'string'
},
{
label: 'field_label_2',
name: 'CustomType'
}
]
// Turn into inputs object with default values
Converted inputs = { 'field': 'defaultvalue' }
=> Convert to inputs, inputParams, inputAnchors
=> inputs = { 'field': 'defaultvalue' } // Turn into inputs object with default values
// Move remaining inputs that are not part of inputAnchors to inputParams
inputParams = [
{
label: 'field_label',
name: 'field'
}
]
=> // For inputs that are part of whitelistTypes
inputParams = [
{
label: 'field_label_1',
name: 'string'
}
]
=> // For inputs that are not part of whitelistTypes
inputAnchors = [
{
label: 'field_label_2',
name: 'CustomType'
}
]
*/
if (nodeData.inputs) {
nodeData.inputParams = nodeData.inputs.filter(({ name }) => !nodeData.inputAnchors.some((exclude) => exclude.name === name))
nodeData.inputs = initializeNodeData(nodeData.inputs)
nodeData.inputAnchors = inputAnchors
nodeData.inputParams = inputParams
nodeData.inputs = initializeDefaultNodeData(nodeData.inputs)
} else {
nodeData.inputAnchors = []
nodeData.inputParams = []
nodeData.inputs = {}
}
if (nodeData.outputs) {
nodeData.outputs = initializeDefaultNodeData(outputAnchors)
} else {
nodeData.outputs = {}
}
nodeData.outputAnchors = outputAnchors
nodeData.id = newNodeId
return nodeData
}
@ -133,7 +178,9 @@ export const isValidConnection = (connection, reactFlowInstance) => {
return true
}
} else {
const targetNodeInputAnchor = targetNode.data.inputAnchors.find((ancr) => ancr.id === targetHandle)
const targetNodeInputAnchor =
targetNode.data.inputAnchors.find((ancr) => ancr.id === targetHandle) ||
targetNode.data.inputParams.find((ancr) => ancr.id === targetHandle)
if (
(targetNodeInputAnchor &&
!targetNodeInputAnchor?.list &&
@ -144,7 +191,6 @@ export const isValidConnection = (connection, reactFlowInstance) => {
}
}
}
return false
}
@ -200,6 +246,7 @@ export const generateExportFlowData = (flowData) => {
inputAnchors: node.data.inputAnchors,
inputs: {},
outputAnchors: node.data.outputAnchors,
outputs: node.data.outputs,
selected: false
}
@ -225,11 +272,16 @@ export const generateExportFlowData = (flowData) => {
return exportJson
}
export const copyToClipboard = (e) => {
const src = e.src
if (Array.isArray(src) || typeof src === 'object') {
navigator.clipboard.writeText(JSON.stringify(src, null, ' '))
} else {
navigator.clipboard.writeText(src)
export const getAvailableNodesForVariable = (nodes, edges, target, targetHandle) => {
// example edge id = "llmChain_0-llmChain_0-output-outputPrediction-string-llmChain_1-llmChain_1-input-promptValues-string"
// {source}-{sourceHandle}-{target}-{targetHandle}
const parentNodes = []
const inputEdges = edges.filter((edg) => edg.target === target && edg.targetHandle === targetHandle)
if (inputEdges && inputEdges.length) {
for (let j = 0; j < inputEdges.length; j += 1) {
const node = nodes.find((nd) => nd.id === inputEdges[j].source)
parentNodes.push(node)
}
}
return parentNodes
}
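
A minimal self-contained check of the edge filtering, using ids from the chain-prompt-values flow earlier in this commit:

const nodes = [{ id: 'llmChain_0' }, { id: 'llmChain_1' }]
const edges = [
    {
        source: 'llmChain_0',
        sourceHandle: 'llmChain_0-output-outputPrediction-string',
        target: 'llmChain_1',
        targetHandle: 'llmChain_1-input-promptValues-string'
    }
]
getAvailableNodesForVariable(nodes, edges, 'llmChain_1', 'llmChain_1-input-promptValues-string')
// => [{ id: 'llmChain_0' }]  (the only node wired into that promptValues handle)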

View File

@ -12,7 +12,7 @@ import NodeOutputHandler from './NodeOutputHandler'
// const
import { baseURL } from 'store/constant'
import { IconTrash } from '@tabler/icons'
import { IconTrash, IconCopy } from '@tabler/icons'
import { flowContext } from 'store/context/ReactFlowContext'
const CardWrapper = styled(MainCard)(({ theme }) => ({
@ -33,7 +33,7 @@ const CardWrapper = styled(MainCard)(({ theme }) => ({
const CanvasNode = ({ data }) => {
const theme = useTheme()
const { deleteNode } = useContext(flowContext)
const { deleteNode, duplicateNode } = useContext(flowContext)
return (
<>
@ -76,10 +76,22 @@ const CanvasNode = ({ data }) => {
</Box>
<div style={{ flexGrow: 1 }}></div>
<IconButton
title='Duplicate'
onClick={() => {
duplicateNode(data.id)
}}
sx={{ height: 35, width: 35, '&:hover': { color: theme?.palette.primary.main } }}
color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'}
>
<IconCopy />
</IconButton>
<IconButton
title='Delete'
onClick={() => {
deleteNode(data.id)
}}
sx={{ height: 35, width: 35, mr: 1 }}
sx={{ height: 35, width: 35, mr: 1, '&:hover': { color: 'red' } }}
color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'}
>
<IconTrash />
</IconButton>

View File

@ -4,13 +4,16 @@ import { useEffect, useRef, useState, useContext } from 'react'
// material-ui
import { useTheme, styled } from '@mui/material/styles'
import { Box, Typography, Tooltip } from '@mui/material'
import { Box, Typography, Tooltip, IconButton } from '@mui/material'
import { tooltipClasses } from '@mui/material/Tooltip'
import { IconArrowsMaximize } from '@tabler/icons'
// project import
import { Dropdown } from 'ui-component/dropdown/Dropdown'
import { Input } from 'ui-component/input/Input'
import { File } from 'ui-component/file/File'
import { flowContext } from 'store/context/ReactFlowContext'
import { isValidConnection } from 'utils/genericHelper'
import { isValidConnection, getAvailableNodesForVariable } from 'utils/genericHelper'
const CustomWidthTooltip = styled(({ className, ...props }) => <Tooltip {...props} classes={{ popper: className }} />)({
[`& .${tooltipClasses.tooltip}`]: {
@ -23,9 +26,35 @@ const CustomWidthTooltip = styled(({ className, ...props }) => <Tooltip {...prop
const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false }) => {
const theme = useTheme()
const ref = useRef(null)
const { reactFlowInstance } = useContext(flowContext)
const updateNodeInternals = useUpdateNodeInternals()
const [position, setPosition] = useState(0)
const { reactFlowInstance } = useContext(flowContext)
const [showExpandDialog, setShowExpandDialog] = useState(false)
const [expandDialogProps, setExpandDialogProps] = useState({})
const onExpandDialogClicked = (value, inputParam) => {
const dialogProp = {
value,
inputParam,
disabled,
confirmButtonName: 'Save',
cancelButtonName: 'Cancel'
}
if (!disabled) {
const nodes = reactFlowInstance.getNodes()
const edges = reactFlowInstance.getEdges()
const nodesForVariable = inputParam.acceptVariable ? getAvailableNodesForVariable(nodes, edges, data.id, inputParam.id) : []
dialogProp.availableNodesForVariable = nodesForVariable
}
setExpandDialogProps(dialogProp)
setShowExpandDialog(true)
}
const onExpandDialogSave = (newValue, inputParamName) => {
setShowExpandDialog(false)
data.inputs[inputParamName] = newValue
}
useEffect(() => {
if (ref.current && ref.current.offsetTop && ref.current.clientHeight) {
@ -68,11 +97,47 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false }) =
{inputParam && (
<>
{inputParam.acceptVariable && (
<CustomWidthTooltip placement='left' title={inputParam.type}>
<Handle
type='target'
position={Position.Left}
key={inputParam.id}
id={inputParam.id}
isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)}
style={{
height: 10,
width: 10,
backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary,
top: position
}}
/>
</CustomWidthTooltip>
)}
<Box sx={{ p: 2 }}>
<Typography>
{inputParam.label}
{!inputParam.optional && <span style={{ color: 'red' }}>&nbsp;*</span>}
</Typography>
<div style={{ display: 'flex', flexDirection: 'row' }}>
<Typography>
{inputParam.label}
{!inputParam.optional && <span style={{ color: 'red' }}>&nbsp;*</span>}
</Typography>
<div style={{ flexGrow: 1 }}></div>
{inputParam.type === 'string' && inputParam.rows && (
<IconButton
size='small'
sx={{
height: 25,
width: 25
}}
title='Expand'
color='primary'
onClick={() =>
onExpandDialogClicked(data.inputs[inputParam.name] ?? inputParam.default ?? '', inputParam)
}
>
<IconArrowsMaximize />
</IconButton>
)}
</div>
{inputParam.type === 'file' && (
<File
disabled={disabled}
@ -87,6 +152,10 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false }) =
inputParam={inputParam}
onChange={(newValue) => (data.inputs[inputParam.name] = newValue)}
value={data.inputs[inputParam.name] ?? inputParam.default ?? ''}
showDialog={showExpandDialog}
dialogProps={expandDialogProps}
onDialogCancel={() => setShowExpandDialog(false)}
onDialogConfirm={(newValue, inputParamName) => onExpandDialogSave(newValue, inputParamName)}
/>
)}
{inputParam.type === 'options' && (

View File

@ -8,6 +8,7 @@ import { Box, Typography, Tooltip } from '@mui/material'
import { tooltipClasses } from '@mui/material/Tooltip'
import { flowContext } from 'store/context/ReactFlowContext'
import { isValidConnection } from 'utils/genericHelper'
import { Dropdown } from 'ui-component/dropdown/Dropdown'
const CustomWidthTooltip = styled(({ className, ...props }) => <Tooltip {...props} classes={{ popper: className }} />)({
[`& .${tooltipClasses.tooltip}`]: {
@ -17,11 +18,12 @@ const CustomWidthTooltip = styled(({ className, ...props }) => <Tooltip {...prop
// ===========================|| NodeOutputHandler ||=========================== //
const NodeOutputHandler = ({ outputAnchor, data }) => {
const NodeOutputHandler = ({ outputAnchor, data, disabled = false }) => {
const theme = useTheme()
const ref = useRef(null)
const updateNodeInternals = useUpdateNodeInternals()
const [position, setPosition] = useState(0)
const [dropdownValue, setDropdownValue] = useState(null)
const { reactFlowInstance } = useContext(flowContext)
useEffect(() => {
@ -39,33 +41,82 @@ const NodeOutputHandler = ({ outputAnchor, data }) => {
}, 0)
}, [data.id, position, updateNodeInternals])
useEffect(() => {
if (dropdownValue) {
setTimeout(() => {
updateNodeInternals(data.id)
}, 0)
}
}, [data.id, dropdownValue, updateNodeInternals])
return (
<div ref={ref}>
<CustomWidthTooltip placement='right' title={outputAnchor.type}>
<Handle
type='source'
position={Position.Right}
key={outputAnchor.id}
id={outputAnchor.id}
isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)}
style={{
height: 10,
width: 10,
backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary,
top: position
}}
/>
</CustomWidthTooltip>
<Box sx={{ p: 2, textAlign: 'end' }}>
<Typography>{outputAnchor.label}</Typography>
</Box>
{outputAnchor.type !== 'options' && !outputAnchor.options && (
<>
<CustomWidthTooltip placement='right' title={outputAnchor.type}>
<Handle
type='source'
position={Position.Right}
key={outputAnchor.id}
id={outputAnchor.id}
isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)}
style={{
height: 10,
width: 10,
backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary,
top: position
}}
/>
</CustomWidthTooltip>
<Box sx={{ p: 2, textAlign: 'end' }}>
<Typography>{outputAnchor.label}</Typography>
</Box>
</>
)}
{outputAnchor.type === 'options' && outputAnchor.options && outputAnchor.options.length > 0 && (
<>
<CustomWidthTooltip
placement='right'
title={
outputAnchor.options.find((opt) => opt.name === data.outputs?.[outputAnchor.name])?.type ?? outputAnchor.type
}
>
<Handle
type='source'
position={Position.Right}
id={outputAnchor.options.find((opt) => opt.name === data.outputs?.[outputAnchor.name])?.id ?? ''}
isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)}
style={{
height: 10,
width: 10,
backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary,
top: position
}}
/>
</CustomWidthTooltip>
<Box sx={{ p: 2, textAlign: 'end' }}>
<Dropdown
disabled={disabled}
disableClearable={true}
name={outputAnchor.name}
options={outputAnchor.options}
onSelect={(newValue) => {
setDropdownValue(newValue)
data.outputs[outputAnchor.name] = newValue
}}
value={data.outputs[outputAnchor.name] ?? outputAnchor.default ?? 'choose an option'}
/>
</Box>
</>
)}
</div>
)
}
NodeOutputHandler.propTypes = {
outputAnchor: PropTypes.object,
data: PropTypes.object
data: PropTypes.object,
disabled: PropTypes.bool
}
export default NodeOutputHandler
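
To make the new 'options' branch concrete, here is a hypothetical output anchor of that shape (field names follow what the component reads; the id reuses the handle-id convention from the edge example earlier):

const outputAnchor = {
    label: 'Output',
    name: 'output',
    type: 'options',
    default: 'outputPrediction',
    options: [
        { label: 'Output Prediction', name: 'outputPrediction', type: 'string', id: 'llmChain_0-output-outputPrediction-string' }
    ]
}
// picking an option stores its name in data.outputs[outputAnchor.name]; the Handle
// then exposes that option's id and type, so downstream connections validate
// against whichever output the user selected
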

View File

@ -108,10 +108,14 @@ const Canvas = () => {
setTimeout(() => setDirty(), 0)
let value
const inputAnchor = node.data.inputAnchors.find((ancr) => ancr.name === targetInput)
const inputParam = node.data.inputParams.find((param) => param.name === targetInput)
if (inputAnchor && inputAnchor.list) {
const newValues = node.data.inputs[targetInput] || []
newValues.push(`{{${sourceNodeId}.data.instance}}`)
value = newValues
} else if (inputParam && inputParam.acceptVariable) {
value = node.data.inputs[targetInput] || ''
} else {
value = `{{${sourceNodeId}.data.instance}}`
}
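
For example (input names other than promptValues are hypothetical; the {{...}} reference format is the one used just above), connecting llmChain_0 into another node yields different input values depending on the kind of target input:

// hypothetical inputs on a target node after three different connections
const inputs = {
    // list input anchor: each new connection appends a reference
    tools: ['{{calculator_0.data.instance}}', '{{llmChain_0.data.instance}}'],
    // acceptVariable input param: the existing string is kept; the edge only makes
    // llmChain_0 available to reference from inside it (e.g. via the expand dialog)
    promptValues: '{ "task": "{{llmChain_0.data.instance}}" }',
    // plain input anchor: the reference simply replaces the value
    model: '{{chatOpenAI_0.data.instance}}'
}
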

View File

@ -88,7 +88,7 @@ const MarketplaceCanvasNode = ({ data }) => {
</>
)}
{data.inputAnchors.map((inputAnchor, index) => (
<NodeInputHandler key={index} inputAnchor={inputAnchor} data={data} />
<NodeInputHandler disabled={true} key={index} inputAnchor={inputAnchor} data={data} />
))}
{data.inputParams.map((inputParam, index) => (
<NodeInputHandler disabled={true} key={index} inputParam={inputParam} data={data} />
@ -108,7 +108,7 @@ const MarketplaceCanvasNode = ({ data }) => {
<Divider />
{data.outputAnchors.map((outputAnchor, index) => (
<NodeOutputHandler key={index} outputAnchor={outputAnchor} data={data} />
<NodeOutputHandler disabled={true} key={index} outputAnchor={outputAnchor} data={data} />
))}
</Box>
</CardWrapper>