From 4b9c39cf544cdebe46760e8ce899c35b5836aadd Mon Sep 17 00:00:00 2001 From: Henry Date: Sun, 16 Apr 2023 23:17:08 +0100 Subject: [PATCH] Add feature to be able to chain prompt values --- package.json | 1 + .../nodes/chains/LLMChain/LLMChain.ts | 120 +++-- packages/components/src/Interface.ts | 26 +- packages/server/marketplaces/Antonym.json | 53 +- .../server/marketplaces/Prompt Chaining.json | 508 ++++++++++++++++++ .../server/marketplaces/Simple LLM Chain.json | 73 ++- packages/server/marketplaces/Translator.json | 68 ++- packages/server/src/ChatflowPool.ts | 7 +- packages/server/src/Interface.ts | 8 +- packages/server/src/index.ts | 58 +- packages/server/src/utils/index.ts | 74 ++- .../ui/src/store/context/ReactFlowContext.js | 45 +- .../dialog/EditPromptValuesDialog.css | 6 + .../dialog/EditPromptValuesDialog.js | 256 +++++++++ .../ui/src/ui-component/dropdown/Dropdown.js | 6 +- .../src/ui-component/editor/DarkCodeEditor.js | 4 +- .../ui-component/editor/LightCodeEditor.js | 4 +- packages/ui/src/ui-component/input/Input.js | 67 ++- .../ui-component/tooltip/TooltipWithParser.js | 10 +- packages/ui/src/utils/genericHelper.js | 148 +++-- packages/ui/src/views/canvas/CanvasNode.js | 18 +- .../ui/src/views/canvas/NodeInputHandler.js | 83 ++- .../ui/src/views/canvas/NodeOutputHandler.js | 91 +++- packages/ui/src/views/canvas/index.js | 4 + .../marketplaces/MarketplaceCanvasNode.js | 4 +- 25 files changed, 1496 insertions(+), 246 deletions(-) create mode 100644 packages/server/marketplaces/Prompt Chaining.json create mode 100644 packages/ui/src/ui-component/dialog/EditPromptValuesDialog.css create mode 100644 packages/ui/src/ui-component/dialog/EditPromptValuesDialog.js diff --git a/package.json b/package.json index cfc2aee50..82ade94ce 100644 --- a/package.json +++ b/package.json @@ -11,6 +11,7 @@ ], "scripts": { "build": "turbo run build", + "build-force": "turbo run build --force", "dev": "turbo run dev --parallel", "start": "run-script-os", "start:windows": "cd packages/server/bin && run start", diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index 0200b3160..df8f85cfe 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -1,4 +1,4 @@ -import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' import { getBaseClasses } from '../../../src/utils' import { LLMChain } from 'langchain/chains' import { BaseLanguageModel } from 'langchain/base_language' @@ -13,6 +13,7 @@ class LLMChain_Chains implements INode { baseClasses: string[] description: string inputs: INodeParams[] + outputs: INodeOutputsValue[] constructor() { this.label = 'LLM Chain' @@ -33,6 +34,13 @@ class LLMChain_Chains implements INode { name: 'prompt', type: 'BasePromptTemplate' }, + { + label: 'Chain Name', + name: 'chainName', + type: 'string', + placeholder: 'Task Creation Chain', + optional: true + }, { label: 'Format Prompt Values', name: 'promptValues', @@ -42,57 +50,99 @@ class LLMChain_Chains implements INode { "input_language": "English", "output_language": "French" }`, - optional: true + optional: true, + acceptVariable: true, + list: true + } + ] + this.outputs = [ + { + label: this.label, + name: this.name, + type: this.type + }, + { + label: 'Output Prediction', + name: 'outputPrediction', + type: 'string' } ] } - async init(nodeData: INodeData): Promise { + async 
init(nodeData: INodeData, input: string): Promise {
         const model = nodeData.inputs?.model as BaseLanguageModel
         const prompt = nodeData.inputs?.prompt as BasePromptTemplate
+        const output = nodeData.outputs?.output as string
+        const promptValuesStr = nodeData.inputs?.promptValues as string
 
-        const chain = new LLMChain({ llm: model, prompt })
-        return chain
+        if (output === this.name) {
+            const chain = new LLMChain({ llm: model, prompt })
+            return chain
+        } else if (output === 'outputPrediction') {
+            const chain = new LLMChain({ llm: model, prompt })
+            const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
+            const res = await runPrediction(inputVariables, chain, input, promptValuesStr)
+            // eslint-disable-next-line no-console
+            console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m')
+            // eslint-disable-next-line no-console
+            console.log(res)
+            return res
+        }
     }
 
     async run(nodeData: INodeData, input: string): Promise {
         const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"]
         const chain = nodeData.instance as LLMChain
+        const promptValuesStr = nodeData.inputs?.promptValues as string
+        const res = await runPrediction(inputVariables, chain, input, promptValuesStr)
+        // eslint-disable-next-line no-console
+        console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m')
+        // eslint-disable-next-line no-console
+        console.log(res)
+        return res
+    }
+}
 
-        if (inputVariables.length === 1) {
-            const res = await chain.run(input)
-            return res
-        } else if (inputVariables.length > 1) {
-            const promptValuesStr = nodeData.inputs?.promptValues as string
-            if (!promptValuesStr) throw new Error('Please provide Prompt Values')
+const runPrediction = async (inputVariables: string[], chain: LLMChain, input: string, promptValuesStr: string) => {
+    if (inputVariables.length === 1) {
+        const res = await chain.run(input)
+        return res
+    } else if (inputVariables.length > 1) {
+        if (!promptValuesStr) throw new Error('Please provide Prompt Values')
+        const promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))
 
-            const promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))
+        let seen: string[] = []
 
-            let seen: string[] = []
-
-            for (const variable of inputVariables) {
-                seen.push(variable)
-                if (promptValues[variable]) {
-                    seen.pop()
-                }
+        for (const variable of inputVariables) {
+            seen.push(variable)
+            if (promptValues[variable]) {
+                seen.pop()
             }
-
-            if (seen.length === 1) {
-                const lastValue = seen.pop()
-                if (!lastValue) throw new Error('Please provide Prompt Values')
-                const options = {
-                    ...promptValues,
-                    [lastValue]: input
-                }
-                const res = await chain.call(options)
-                return res?.text
-            } else {
-                throw new Error('Please provide Prompt Values')
-            }
-        } else {
-            const res = await chain.run(input)
-            return res
         }
+
+        if (seen.length === 0) {
+            // All inputVariables have fixed values specified
+            const options = {
+                ...promptValues
+            }
+            const res = await chain.call(options)
+            return res?.text
+        } else if (seen.length === 1) {
+            // If one inputVariable is not specified, use input (user's question) as the value
+            const lastValue = seen.pop()
+            if (!lastValue) throw new Error('Please provide Prompt Values')
+            const options = {
+                ...promptValues,
+                [lastValue]: input
+            }
+            const res = await chain.call(options)
+            return res?.text
+        } else {
+            throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`)
+        }
+    } else {
+        const res = await chain.run(input)
+        return res
     }
 }
diff --git a/packages/components/src/Interface.ts
b/packages/components/src/Interface.ts index 2de18d5cd..d1831fa11 100644 --- a/packages/components/src/Interface.ts +++ b/packages/components/src/Interface.ts @@ -2,18 +2,7 @@ * Types */ -export type NodeParamsType = - | 'asyncOptions' - | 'options' - | 'string' - | 'number' - | 'boolean' - | 'password' - | 'json' - | 'code' - | 'date' - | 'file' - | 'folder' +export type NodeParamsType = 'options' | 'string' | 'number' | 'boolean' | 'password' | 'json' | 'code' | 'date' | 'file' | 'folder' export type CommonType = string | number | boolean | undefined | null @@ -40,6 +29,13 @@ export interface INodeOptionsValue { description?: string } +export interface INodeOutputsValue { + label: string + name: string + type: string + description?: string +} + export interface INodeParams { label: string name: string @@ -50,6 +46,7 @@ export interface INodeParams { optional?: boolean | INodeDisplay rows?: number list?: boolean + acceptVariable?: boolean placeholder?: string fileType?: string } @@ -75,12 +72,15 @@ export interface INodeProperties { export interface INode extends INodeProperties { inputs?: INodeParams[] - getInstance?(nodeData: INodeData): Promise + output?: INodeOutputsValue[] + init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise } export interface INodeData extends INodeProperties { + id: string inputs?: ICommonObject + outputs?: ICommonObject instance?: any } diff --git a/packages/server/marketplaces/Antonym.json b/packages/server/marketplaces/Antonym.json index c91161fc7..9f83076f4 100644 --- a/packages/server/marketplaces/Antonym.json +++ b/packages/server/marketplaces/Antonym.json @@ -3,7 +3,7 @@ "nodes": [ { "width": 300, - "height": 360, + "height": 366, "id": "promptTemplate_0", "position": { "x": 294.38456937448433, @@ -50,7 +50,7 @@ }, { "width": 300, - "height": 886, + "height": 905, "id": "fewShotPromptTemplate_0", "position": { "x": 719.2200337843097, @@ -223,11 +223,11 @@ }, { "width": 300, - "height": 461, + "height": 592, "id": "llmChain_0", "position": { - "x": 1499.2654451385026, - "y": 356.3275374721362 + "x": 1489.0277667172852, + "y": 357.461975349771 }, "type": "customNode", "data": { @@ -239,13 +239,24 @@ "category": "Chains", "description": "Chain to run queries against LLMs", "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Task Creation Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + }, { "label": "Format Prompt Values", "name": "promptValues", "type": "string", "rows": 5, "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", - "optional": true + "optional": true, + "acceptVariable": true, + "list": true, + "id": "llmChain_0-input-promptValues-string" } ], "inputAnchors": [ @@ -265,22 +276,40 @@ "inputs": { "model": "{{openAI_0.data.instance}}", "prompt": "{{fewShotPromptTemplate_0.data.instance}}", + "chainName": "", "promptValues": "" }, "outputAnchors": [ { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", - "name": "llmChain", - "label": "LLMChain", - "type": "LLMChain | BaseChain" + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain" + }, + { + "id": "llmChain_0-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" 
+ } + ], + "default": "llmChain" } ], + "outputs": { + "output": "llmChain" + }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 1499.2654451385026, - "y": 356.3275374721362 + "x": 1489.0277667172852, + "y": 357.461975349771 }, "dragging": false } diff --git a/packages/server/marketplaces/Prompt Chaining.json b/packages/server/marketplaces/Prompt Chaining.json new file mode 100644 index 000000000..ede2b6425 --- /dev/null +++ b/packages/server/marketplaces/Prompt Chaining.json @@ -0,0 +1,508 @@ +{ + "description": "Use output from a chain as prompt for another chain", + "nodes": [ + { + "width": 300, + "height": 592, + "id": "llmChain_0", + "position": { + "x": 586.058087758348, + "y": 109.99914917840562 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Task Creation Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "string", + "rows": 5, + "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "llmChain_0-input-promptValues-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + } + ], + "inputs": { + "model": "{{openAI_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "chainName": "FirstChain", + "promptValues": "{\n \"objective\": \"{{question}}\"\n}" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain" + }, + { + "id": "llmChain_0-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "outputPrediction" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 586.058087758348, + "y": 109.99914917840562 + }, + "dragging": false + }, + { + "width": 300, + "height": 366, + "id": "promptTemplate_0", + "position": { + "x": 231.20329590069747, + "y": 313.54994365714185 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_0", + "label": "Prompt Template", + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 5, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_0-input-template-string" + } + ], + "inputAnchors": [], + "inputs": { + "template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:" + }, + "outputAnchors": [ + { + "id": 
"promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 231.20329590069747, + "y": 313.54994365714185 + }, + "dragging": false + }, + { + "width": 300, + "height": 592, + "id": "llmChain_1", + "position": { + "x": 1637.4327907249694, + "y": 127.71255193457947 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Task Creation Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "string", + "rows": 5, + "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "llmChain_1-input-promptValues-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + } + ], + "inputs": { + "model": "{{openAI_0.data.instance}}", + "prompt": "{{promptTemplate_1.data.instance}}", + "chainName": "FinalChain", + "promptValues": "{\n \"objective\": \"{{question}}\",\n \"result\": \"{{llmChain_0.data.instance}}\"\n}" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain" + }, + { + "id": "llmChain_1-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1637.4327907249694, + "y": 127.71255193457947 + }, + "dragging": false + }, + { + "width": 300, + "height": 366, + "id": "promptTemplate_1", + "position": { + "x": 950.292796637893, + "y": 62.31864791878181 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_1", + "label": "Prompt Template", + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 5, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_1-input-template-string" + } + ], + "inputAnchors": [], + "inputs": { + "template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array." 
+ }, + "outputAnchors": [ + { + "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 950.292796637893, + "y": 62.31864791878181 + }, + "dragging": false + }, + { + "width": 300, + "height": 472, + "id": "openAI_0", + "position": { + "x": 225.7603660247592, + "y": -193.45016241085625 + }, + "type": "customNode", + "data": { + "id": "openAI_0", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_0-input-temperature-number" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": "0" + }, + "outputAnchors": [ + { + "id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 225.7603660247592, + "y": -193.45016241085625 + } + }, + { + "width": 300, + "height": 472, + "id": "openAI_1", + "position": { + "x": 1275.7643968219816, + "y": -197.07668364123862 + }, + "type": "customNode", + "data": { + "id": "openAI_1", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_0-input-temperature-number" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": "0" + }, + "outputAnchors": [ + { + "id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel" + } + ], + 
"outputs": {}, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 1275.7643968219816, + "y": -197.07668364123862 + } + } + ], + "edges": [ + { + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "openAI_0", + "sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_1", + "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "llmChain_0", + "sourceHandle": "llmChain_0-output-outputPrediction-string", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-promptValues-string", + "type": "buttonedge", + "id": "llmChain_0-llmChain_0-output-outputPrediction-string-llmChain_1-llmChain_1-input-promptValues-string", + "data": { + "label": "" + } + }, + { + "source": "openAI_1", + "sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_1-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Simple LLM Chain.json b/packages/server/marketplaces/Simple LLM Chain.json index 1e7529a0e..994e27507 100644 --- a/packages/server/marketplaces/Simple LLM Chain.json +++ b/packages/server/marketplaces/Simple LLM Chain.json @@ -81,7 +81,7 @@ }, { "width": 300, - "height": 360, + "height": 366, "id": "promptTemplate_0", "position": { "x": 970.576876549135, @@ -128,11 +128,11 @@ }, { "width": 300, - "height": 461, + "height": 592, "id": "llmChain_0", "position": { - "x": 1414.1175742139496, - "y": 340.4040954840462 + "x": 1386.5063477084716, + "y": 211.47670100294192 }, "type": "customNode", "data": { @@ -144,13 +144,24 @@ "category": "Chains", "description": "Chain to run queries against LLMs", "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Task Creation Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + }, { "label": "Format Prompt Values", "name": "promptValues", "type": "string", "rows": 5, "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", - "optional": true + "optional": true, + "acceptVariable": true, + "list": true, + "id": "llmChain_0-input-promptValues-string" } ], "inputAnchors": [ @@ -170,38 
+181,45 @@ "inputs": { "model": "{{openAI_0.data.instance}}", "prompt": "{{promptTemplate_0.data.instance}}", + "chainName": "CompanyName Chain", "promptValues": "" }, "outputAnchors": [ { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", - "name": "llmChain", - "label": "LLMChain", - "type": "LLMChain | BaseChain" + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain" + }, + { + "id": "llmChain_0-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" } ], + "outputs": { + "output": "llmChain" + }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 1414.1175742139496, - "y": 340.4040954840462 + "x": 1386.5063477084716, + "y": 211.47670100294192 }, "dragging": false } ], "edges": [ - { - "source": "promptTemplate_0", - "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - }, { "source": "openAI_0", "sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", @@ -212,6 +230,17 @@ "data": { "label": "" } + }, + { + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } } ] } diff --git a/packages/server/marketplaces/Translator.json b/packages/server/marketplaces/Translator.json index f91792282..66e4e4822 100644 --- a/packages/server/marketplaces/Translator.json +++ b/packages/server/marketplaces/Translator.json @@ -1,13 +1,14 @@ { "description": "Language translation using LLM Chain with a Chat Prompt Template and Chat Model", + "nodes": [ { "width": 300, - "height": 460, + "height": 473, "id": "chatPromptTemplate_0", "position": { - "x": 524, - "y": 237 + "x": 906.3845860429262, + "y": 522.7223115041937 }, "type": "customNode", "data": { @@ -52,8 +53,8 @@ "selected": false, "dragging": false, "positionAbsolute": { - "x": 524, - "y": 237 + "x": 906.3845860429262, + "y": 522.7223115041937 } }, { @@ -61,8 +62,8 @@ "height": 472, "id": "chatOpenAI_0", "position": { - "x": 855.1997276913991, - "y": 24.090553068402556 + "x": 909.2168811101023, + "y": 10.159813502526418 }, "type": "customNode", "data": { @@ -133,18 +134,18 @@ }, "selected": false, "positionAbsolute": { - "x": 855.1997276913991, - "y": 24.090553068402556 + "x": 909.2168811101023, + "y": 10.159813502526418 }, "dragging": false }, { "width": 300, - "height": 461, + "height": 592, "id": "llmChain_0", "position": { - "x": 1192.2235692202612, - "y": 361.71736677076257 + "x": 1318.8661313433918, + "y": 323.51085023894643 }, "type": "customNode", "data": { @@ -156,13 +157,24 @@ "category": "Chains", "description": "Chain to run queries against LLMs", 
"inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Task Creation Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + }, { "label": "Format Prompt Values", "name": "promptValues", "type": "string", "rows": 5, "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", - "optional": true + "optional": true, + "acceptVariable": true, + "list": true, + "id": "llmChain_0-input-promptValues-string" } ], "inputAnchors": [ @@ -182,22 +194,40 @@ "inputs": { "model": "{{chatOpenAI_0.data.instance}}", "prompt": "{{chatPromptTemplate_0.data.instance}}", + "chainName": "", "promptValues": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}" }, "outputAnchors": [ { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", - "name": "llmChain", - "label": "LLMChain", - "type": "LLMChain | BaseChain" + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain" + }, + { + "id": "llmChain_0-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" } ], + "outputs": { + "output": "llmChain" + }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 1192.2235692202612, - "y": 361.71736677076257 + "x": 1318.8661313433918, + "y": 323.51085023894643 }, "dragging": false } diff --git a/packages/server/src/ChatflowPool.ts b/packages/server/src/ChatflowPool.ts index cc738e032..125f7f57a 100644 --- a/packages/server/src/ChatflowPool.ts +++ b/packages/server/src/ChatflowPool.ts @@ -1,9 +1,8 @@ -import { INodeData } from 'flowise-components' -import { IActiveChatflows } from './Interface' +import { IActiveChatflows, INodeData } from './Interface' /** - * This pool is to keep track of active test triggers (event listeners), - * so we can clear the event listeners whenever user refresh or exit page + * This pool is to keep track of active chatflow pools + * so we can prevent building langchain flow all over again */ export class ChatflowPool { activeChatflows: IActiveChatflows = {} diff --git a/packages/server/src/Interface.ts b/packages/server/src/Interface.ts index d228d9376..cc83e1a88 100644 --- a/packages/server/src/Interface.ts +++ b/packages/server/src/Interface.ts @@ -1,4 +1,4 @@ -import { INode, INodeData } from 'flowise-components' +import { INode, INodeData as INodeDataFromComponent, INodeParams } from 'flowise-components' export type MessageType = 'apiMessage' | 'userMessage' @@ -38,6 +38,12 @@ export interface INodeDirectedGraph { [key: string]: string[] } +export interface INodeData extends INodeDataFromComponent { + inputAnchors: INodeParams[] + inputParams: INodeParams[] + outputAnchors: INodeParams[] +} + export interface IReactFlowNode { id: string position: { diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index cec71051b..599d0e620 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -4,15 +4,22 @@ import cors from 'cors' import http from 'http' import * as fs from 'fs' -import { IChatFlow, IncomingInput, IReactFlowNode, IReactFlowObject } from './Interface' -import { getNodeModulesPackagePath, getStartingNodes, buildLangchain, getEndingNode, constructGraphs } from './utils' +import { IChatFlow, IncomingInput, IReactFlowNode, IReactFlowObject, INodeData } from 
'./Interface' +import { + getNodeModulesPackagePath, + getStartingNodes, + buildLangchain, + getEndingNode, + constructGraphs, + resolveVariables, + checkIfFlowNeedToRebuild +} from './utils' import { cloneDeep } from 'lodash' import { getDataSource } from './DataSource' import { NodesPool } from './NodesPool' import { ChatFlow } from './entity/ChatFlow' import { ChatMessage } from './entity/ChatMessage' import { ChatflowPool } from './ChatflowPool' -import { INodeData } from 'flowise-components' export class App { app: express.Application @@ -196,44 +203,61 @@ export class App { let nodeToExecuteData: INodeData + const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowid + }) + if (!chatflow) return res.status(404).send(`Chatflow ${chatflowid} not found`) + + const flowData = chatflow.flowData + const parsedFlowData: IReactFlowObject = JSON.parse(flowData) + const nodes = parsedFlowData.nodes + const edges = parsedFlowData.edges + + // Check if node data exists in pool && not out of sync, prevent building whole flow again if ( Object.prototype.hasOwnProperty.call(this.chatflowPool.activeChatflows, chatflowid) && - this.chatflowPool.activeChatflows[chatflowid].inSync + this.chatflowPool.activeChatflows[chatflowid].inSync && + !checkIfFlowNeedToRebuild(nodes, this.chatflowPool.activeChatflows[chatflowid].endingNodeData) ) { nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData } else { - const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({ - id: chatflowid - }) - if (!chatflow) return res.status(404).send(`Chatflow ${chatflowid} not found`) - - const flowData = chatflow.flowData - const parsedFlowData: IReactFlowObject = JSON.parse(flowData) - /*** Get Ending Node with Directed Graph ***/ - const { graph, nodeDependencies } = constructGraphs(parsedFlowData.nodes, parsedFlowData.edges) + const { graph, nodeDependencies } = constructGraphs(nodes, edges) const directedGraph = graph const endingNodeId = getEndingNode(nodeDependencies, directedGraph) if (!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`) + const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data + if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`) + + if (!Object.values(endingNodeData.outputs ?? 
{}).includes(endingNodeData.name)) { + return res + .status(500) + .send( + `Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction` + ) + } + /*** Get Starting Nodes with Non-Directed Graph ***/ - const constructedObj = constructGraphs(parsedFlowData.nodes, parsedFlowData.edges, true) + const constructedObj = constructGraphs(nodes, edges, true) const nonDirectedGraph = constructedObj.graph const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId) /*** BFS to traverse from Starting Nodes to Ending Node ***/ const reactFlowNodes = await buildLangchain( startingNodeIds, - parsedFlowData.nodes, + nodes, graph, depthQueue, - this.nodesPool.componentNodes + this.nodesPool.componentNodes, + incomingInput.question ) const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId) if (!nodeToExecute) return res.status(404).send(`Node ${endingNodeId} not found`) - nodeToExecuteData = nodeToExecute.data + const reactFlowNodeData: INodeData = resolveVariables(nodeToExecute.data, reactFlowNodes, incomingInput.question) + nodeToExecuteData = reactFlowNodeData this.chatflowPool.add(chatflowid, nodeToExecuteData) } diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index fce1e02e6..564ff51f3 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -8,10 +8,14 @@ import { INodeDirectedGraph, INodeQueue, IReactFlowEdge, - IReactFlowNode + IReactFlowNode, + IVariableDict, + INodeData } from '../Interface' import { cloneDeep, get } from 'lodash' -import { ICommonObject, INodeData } from 'flowise-components' +import { ICommonObject } from 'flowise-components' + +const QUESTION_VAR_PREFIX = 'question' /** * Returns the home folder path of the user if @@ -166,13 +170,15 @@ export const getEndingNode = (nodeDependencies: INodeDependencies, graph: INodeD * @param {INodeDirectedGraph} graph * @param {IDepthQueue} depthQueue * @param {IComponentNodes} componentNodes + * @param {string} question */ export const buildLangchain = async ( startingNodeIds: string[], reactFlowNodes: IReactFlowNode[], graph: INodeDirectedGraph, depthQueue: IDepthQueue, - componentNodes: IComponentNodes + componentNodes: IComponentNodes, + question: string ) => { const flowNodes = cloneDeep(reactFlowNodes) @@ -200,9 +206,9 @@ export const buildLangchain = async ( const nodeModule = await import(nodeInstanceFilePath) const newNodeInstance = new nodeModule.nodeClass() - const reactFlowNodeData: INodeData = resolveVariables(reactFlowNode.data, flowNodes) + const reactFlowNodeData: INodeData = resolveVariables(reactFlowNode.data, flowNodes, question) - flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData) + flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question) } catch (e: any) { console.error(e) throw new Error(e) @@ -247,11 +253,14 @@ export const buildLangchain = async ( * Get variable value from outputResponses.output * @param {string} paramValue * @param {IReactFlowNode[]} reactFlowNodes + * @param {string} question + * @param {boolean} isAcceptVariable * @returns {string} */ -export const getVariableValue = (paramValue: string, reactFlowNodes: IReactFlowNode[]) => { +export const getVariableValue = (paramValue: string, reactFlowNodes: IReactFlowNode[], question: string, isAcceptVariable = false) => { let returnVal = paramValue const variableStack = [] + const variableDict = {} as 
IVariableDict
     let startIdx = 0
     const endIdx = returnVal.length - 1
 
@@ -269,17 +278,36 @@
             const variableEndIdx = startIdx
             const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx)
 
+            if (isAcceptVariable && variableFullPath === QUESTION_VAR_PREFIX) {
+                variableDict[`{{${variableFullPath}}}`] = question
+            }
+
             // Split by first occurence of '.' to get just nodeId
             const [variableNodeId, _] = variableFullPath.split('.')
             const executedNode = reactFlowNodes.find((nd) => nd.id === variableNodeId)
             if (executedNode) {
-                const variableInstance = get(executedNode.data, 'instance')
-                returnVal = variableInstance
+                const variableValue = get(executedNode.data, 'instance')
+                if (isAcceptVariable) {
+                    variableDict[`{{${variableFullPath}}}`] = variableValue
+                } else {
+                    returnVal = variableValue
+                }
             }
             variableStack.pop()
         }
         startIdx += 1
     }
+
+    if (isAcceptVariable) {
+        const variablePaths = Object.keys(variableDict)
+        variablePaths.sort() // Sort by length of variable path because a longer path could possibly contain a nested variable
+        variablePaths.forEach((path) => {
+            const variableValue = variableDict[path]
+            // Replace all occurrences
+            returnVal = returnVal.split(path).join(variableValue)
+        })
+        return returnVal
+    }
     return returnVal
 }
 
@@ -287,25 +315,26 @@
  * Loop through each inputs and resolve variable if neccessary
  * @param {INodeData} reactFlowNodeData
  * @param {IReactFlowNode[]} reactFlowNodes
+ * @param {string} question
  * @returns {INodeData}
  */
-export const resolveVariables = (reactFlowNodeData: INodeData, reactFlowNodes: IReactFlowNode[]): INodeData => {
+export const resolveVariables = (reactFlowNodeData: INodeData, reactFlowNodes: IReactFlowNode[], question: string): INodeData => {
     const flowNodeData = cloneDeep(reactFlowNodeData)
     const types = 'inputs'
 
     const getParamValues = (paramsObj: ICommonObject) => {
         for (const key in paramsObj) {
             const paramValue: string = paramsObj[key]
-
             if (Array.isArray(paramValue)) {
                 const resolvedInstances = []
                 for (const param of paramValue) {
-                    const resolvedInstance = getVariableValue(param, reactFlowNodes)
+                    const resolvedInstance = getVariableValue(param, reactFlowNodes, question)
                     resolvedInstances.push(resolvedInstance)
                 }
                 paramsObj[key] = resolvedInstances
             } else {
-                const resolvedInstance = getVariableValue(paramValue, reactFlowNodes)
+                const isAcceptVariable = reactFlowNodeData.inputParams.find((param) => param.name === key)?.acceptVariable ??
false + const resolvedInstance = getVariableValue(paramValue, reactFlowNodes, question, isAcceptVariable) paramsObj[key] = resolvedInstance } } @@ -317,3 +346,24 @@ export const resolveVariables = (reactFlowNodeData: INodeData, reactFlowNodes: I return flowNodeData } + +/** + * Rebuild flow if LLMChain has dependency on other chains + * User Question => Prompt_0 => LLMChain_0 => Prompt-1 => LLMChain_1 + * @param {IReactFlowNode[]} nodes + * @param {INodeData} nodeData + * @returns {boolean} + */ +export const checkIfFlowNeedToRebuild = (nodes: IReactFlowNode[], nodeData: INodeData) => { + if (nodeData.name !== 'llmChain') return false + + const node = nodes.find((nd) => nd.id === nodeData.id) + if (!node) throw new Error(`Node ${nodeData.id} not found`) + + const inputs = node.data.inputs + for (const key in inputs) { + const isInputAcceptVariable = node.data.inputParams.find((param) => param.name === key)?.acceptVariable || false + if (isInputAcceptVariable && inputs[key].includes('{{') && inputs[key].includes('}}')) return true + } + return false +} diff --git a/packages/ui/src/store/context/ReactFlowContext.js b/packages/ui/src/store/context/ReactFlowContext.js index b8b32606b..66a083974 100644 --- a/packages/ui/src/store/context/ReactFlowContext.js +++ b/packages/ui/src/store/context/ReactFlowContext.js @@ -1,9 +1,12 @@ import { createContext, useState } from 'react' import PropTypes from 'prop-types' +import { getUniqueNodeId } from 'utils/genericHelper' +import { cloneDeep } from 'lodash' const initialValue = { reactFlowInstance: null, setReactFlowInstance: () => {}, + duplicateNode: () => {}, deleteNode: () => {}, deleteEdge: () => {} } @@ -22,13 +25,53 @@ export const ReactFlowContext = ({ children }) => { reactFlowInstance.setEdges(reactFlowInstance.getEdges().filter((edge) => edge.id !== id)) } + const duplicateNode = (id) => { + const nodes = reactFlowInstance.getNodes() + const originalNode = nodes.find((n) => n.id === id) + if (originalNode) { + const newNodeId = getUniqueNodeId(originalNode.data, nodes) + const clonedNode = cloneDeep(originalNode) + + const duplicatedNode = { + ...clonedNode, + id: newNodeId, + position: { + x: clonedNode.position.x + 400, + y: clonedNode.position.y + }, + positionAbsolute: { + x: clonedNode.positionAbsolute.x + 400, + y: clonedNode.positionAbsolute.y + }, + data: { + ...clonedNode.data, + id: newNodeId + }, + selected: false + } + + const dataKeys = ['inputParams', 'inputAnchors', 'outputAnchors'] + + for (const key of dataKeys) { + for (const item of duplicatedNode.data[key]) { + if (item.id) { + item.id = item.id.replace(id, newNodeId) + } + } + } + + reactFlowInstance.setNodes([...nodes, duplicatedNode]) + } + } + return ( {children} diff --git a/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.css b/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.css new file mode 100644 index 000000000..d0e2ba261 --- /dev/null +++ b/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.css @@ -0,0 +1,6 @@ +.editor__textarea { + outline: 0; +} +.editor__textarea::placeholder { + color: rgba(120, 120, 120, 0.5); +} diff --git a/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.js b/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.js new file mode 100644 index 000000000..199b13067 --- /dev/null +++ b/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.js @@ -0,0 +1,256 @@ +import { createPortal } from 'react-dom' +import { useState, useEffect } from 'react' +import { useSelector } from 'react-redux' 
+import PropTypes from 'prop-types' +import { + Button, + Dialog, + DialogActions, + DialogContent, + Box, + List, + ListItemButton, + ListItem, + ListItemAvatar, + ListItemText, + Typography, + Stack +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import PerfectScrollbar from 'react-perfect-scrollbar' +import { StyledButton } from 'ui-component/button/StyledButton' +import { DarkCodeEditor } from 'ui-component/editor/DarkCodeEditor' +import { LightCodeEditor } from 'ui-component/editor/LightCodeEditor' + +import './EditPromptValuesDialog.css' +import { baseURL } from 'store/constant' + +const EditPromptValuesDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const languageType = 'json' + + const [inputValue, setInputValue] = useState('') + const [inputParam, setInputParam] = useState(null) + const [textCursorPosition, setTextCursorPosition] = useState({}) + + useEffect(() => { + if (dialogProps.value) setInputValue(dialogProps.value) + if (dialogProps.inputParam) setInputParam(dialogProps.inputParam) + + return () => { + setInputValue('') + setInputParam(null) + setTextCursorPosition({}) + } + }, [dialogProps]) + + const onMouseUp = (e) => { + if (e.target && e.target.selectionEnd && e.target.value) { + const cursorPosition = e.target.selectionEnd + const textBeforeCursorPosition = e.target.value.substring(0, cursorPosition) + const textAfterCursorPosition = e.target.value.substring(cursorPosition, e.target.value.length) + const body = { + textBeforeCursorPosition, + textAfterCursorPosition + } + setTextCursorPosition(body) + } else { + setTextCursorPosition({}) + } + } + + const onSelectOutputResponseClick = (node, isUserQuestion = false) => { + let variablePath = isUserQuestion ? `question` : `${node.id}.data.instance` + if (textCursorPosition) { + let newInput = '' + if (textCursorPosition.textBeforeCursorPosition === undefined && textCursorPosition.textAfterCursorPosition === undefined) + newInput = `${inputValue}${`{{${variablePath}}}`}` + else newInput = `${textCursorPosition.textBeforeCursorPosition}{{${variablePath}}}${textCursorPosition.textAfterCursorPosition}` + setInputValue(newInput) + } + } + + const component = show ? ( + + +
+ {inputParam && inputParam.type === 'string' && ( +
+ + {inputParam.label} + + + {customization.isDarkMode ? ( + setInputValue(code)} + placeholder={inputParam.placeholder} + type={languageType} + onMouseUp={(e) => onMouseUp(e)} + onBlur={(e) => onMouseUp(e)} + style={{ + fontSize: '0.875rem', + minHeight: 'calc(100vh - 220px)', + width: '100%' + }} + /> + ) : ( + setInputValue(code)} + placeholder={inputParam.placeholder} + type={languageType} + onMouseUp={(e) => onMouseUp(e)} + onBlur={(e) => onMouseUp(e)} + style={{ + fontSize: '0.875rem', + minHeight: 'calc(100vh - 220px)', + width: '100%' + }} + /> + )} + +
+ )} + {!dialogProps.disabled && inputParam && inputParam.acceptVariable && ( +
+ + Select Variable + + + + + onSelectOutputResponseClick(null, true)} + > + + +
+ AI +
+
+ +
+
+ {dialogProps.availableNodesForVariable && + dialogProps.availableNodesForVariable.length > 0 && + dialogProps.availableNodesForVariable.map((node, index) => { + const selectedOutputAnchor = node.data.outputAnchors[0].options.find( + (ancr) => ancr.name === node.data.outputs['output'] + ) + return ( + onSelectOutputResponseClick(node)} + > + + +
+ {node.data.name} +
+
+ +
+
+ ) + })} +
+
+
+
+ )} +
+
+ + + onConfirm(inputValue, inputParam.name)}> + {dialogProps.confirmButtonName} + + +
+ ) : null + + return createPortal(component, portalElement) +} + +EditPromptValuesDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default EditPromptValuesDialog diff --git a/packages/ui/src/ui-component/dropdown/Dropdown.js b/packages/ui/src/ui-component/dropdown/Dropdown.js index 691546c42..12d10bef5 100644 --- a/packages/ui/src/ui-component/dropdown/Dropdown.js +++ b/packages/ui/src/ui-component/dropdown/Dropdown.js @@ -18,7 +18,7 @@ const StyledPopper = styled(Popper)({ } }) -export const Dropdown = ({ name, value, options, onSelect, disabled = false }) => { +export const Dropdown = ({ name, value, options, onSelect, disabled = false, disableClearable = false }) => { const customization = useSelector((state) => state.customization) const findMatchingOptions = (options = [], value) => options.find((option) => option.name === value) const getDefaultOptionValue = () => '' @@ -29,6 +29,7 @@ export const Dropdown = ({ name, value, options, onSelect, disabled = false }) = { +export const DarkCodeEditor = ({ value, placeholder, disabled = false, type, style, onValueChange, onMouseUp, onBlur }) => { const theme = useTheme() return ( highlight(code, type === 'json' ? languages.json : languages.js)} @@ -32,6 +33,7 @@ export const DarkCodeEditor = ({ value, placeholder, type, style, onValueChange, DarkCodeEditor.propTypes = { value: PropTypes.string, placeholder: PropTypes.string, + disabled: PropTypes.bool, type: PropTypes.string, style: PropTypes.object, onValueChange: PropTypes.func, diff --git a/packages/ui/src/ui-component/editor/LightCodeEditor.js b/packages/ui/src/ui-component/editor/LightCodeEditor.js index 36d56fb82..86f7057df 100644 --- a/packages/ui/src/ui-component/editor/LightCodeEditor.js +++ b/packages/ui/src/ui-component/editor/LightCodeEditor.js @@ -8,11 +8,12 @@ import './prism-light.css' import PropTypes from 'prop-types' import { useTheme } from '@mui/material/styles' -export const LightCodeEditor = ({ value, placeholder, type, style, onValueChange, onMouseUp, onBlur }) => { +export const LightCodeEditor = ({ value, placeholder, disabled = false, type, style, onValueChange, onMouseUp, onBlur }) => { const theme = useTheme() return ( highlight(code, type === 'json' ? languages.json : languages.js)} @@ -32,6 +33,7 @@ export const LightCodeEditor = ({ value, placeholder, type, style, onValueChange LightCodeEditor.propTypes = { value: PropTypes.string, placeholder: PropTypes.string, + disabled: PropTypes.bool, type: PropTypes.string, style: PropTypes.object, onValueChange: PropTypes.func, diff --git a/packages/ui/src/ui-component/input/Input.js b/packages/ui/src/ui-component/input/Input.js index 13b457f70..f04c086ad 100644 --- a/packages/ui/src/ui-component/input/Input.js +++ b/packages/ui/src/ui-component/input/Input.js @@ -1,28 +1,53 @@ import { useState } from 'react' import PropTypes from 'prop-types' import { FormControl, OutlinedInput } from '@mui/material' +import EditPromptValuesDialog from 'ui-component/dialog/EditPromptValuesDialog' -export const Input = ({ inputParam, value, onChange, disabled = false }) => { +export const Input = ({ inputParam, value, onChange, disabled = false, showDialog, dialogProps, onDialogCancel, onDialogConfirm }) => { const [myValue, setMyValue] = useState(value ?? 
'') + + const getInputType = (type) => { + switch (type) { + case 'string': + return 'text' + case 'password': + return 'password' + case 'number': + return 'number' + default: + return 'text' + } + } + return ( - - { - setMyValue(e.target.value) - onChange(e.target.value) + <> + + { + setMyValue(e.target.value) + onChange(e.target.value) + }} + /> + + { + setMyValue(newValue) + onDialogConfirm(newValue, inputParamName) }} - /> - + > + ) } @@ -30,5 +55,9 @@ Input.propTypes = { inputParam: PropTypes.object, value: PropTypes.string, onChange: PropTypes.func, - disabled: PropTypes.bool + disabled: PropTypes.bool, + showDialog: PropTypes.bool, + dialogProps: PropTypes.object, + onDialogCancel: PropTypes.func, + onDialogConfirm: PropTypes.func } diff --git a/packages/ui/src/ui-component/tooltip/TooltipWithParser.js b/packages/ui/src/ui-component/tooltip/TooltipWithParser.js index e379eb18e..a47083779 100644 --- a/packages/ui/src/ui-component/tooltip/TooltipWithParser.js +++ b/packages/ui/src/ui-component/tooltip/TooltipWithParser.js @@ -9,13 +9,9 @@ export const TooltipWithParser = ({ title }) => { return ( -
- - - -
+ + +
) } diff --git a/packages/ui/src/utils/genericHelper.js b/packages/ui/src/utils/genericHelper.js index 749e12021..a08eec39c 100644 --- a/packages/ui/src/utils/genericHelper.js +++ b/packages/ui/src/utils/genericHelper.js @@ -22,23 +22,12 @@ export const getUniqueNodeId = (nodeData, nodes) => { return nodeId } -export const initializeNodeData = (nodeParams) => { +export const initializeDefaultNodeData = (nodeParams) => { const initialValues = {} for (let i = 0; i < nodeParams.length; i += 1) { const input = nodeParams[i] - - // Load from nodeParams default values initialValues[input.name] = input.default || '' - - // Special case for array, always initialize the item if default is not set - if (input.type === 'array' && !input.default) { - const newObj = {} - for (let j = 0; j < input.array.length; j += 1) { - newObj[input.array[j].name] = input.array[j].default || '' - } - initialValues[input.name] = [newObj] - } } return initialValues @@ -46,62 +35,118 @@ export const initializeNodeData = (nodeParams) => { export const initNode = (nodeData, newNodeId) => { const inputAnchors = [] + const inputParams = [] const incoming = nodeData.inputs ? nodeData.inputs.length : 0 const outgoing = 1 - const whitelistTypes = ['asyncOptions', 'options', 'string', 'number', 'boolean', 'password', 'json', 'code', 'date', 'file', 'folder'] + const whitelistTypes = ['options', 'string', 'number', 'boolean', 'password', 'json', 'code', 'date', 'file', 'folder'] for (let i = 0; i < incoming; i += 1) { - if (whitelistTypes.includes(nodeData.inputs[i].type)) continue const newInput = { ...nodeData.inputs[i], id: `${newNodeId}-input-${nodeData.inputs[i].name}-${nodeData.inputs[i].type}` } - inputAnchors.push(newInput) + if (whitelistTypes.includes(nodeData.inputs[i].type)) { + inputParams.push(newInput) + } else { + inputAnchors.push(newInput) + } } const outputAnchors = [] for (let i = 0; i < outgoing; i += 1) { - const newOutput = { - id: `${newNodeId}-output-${nodeData.name}-${nodeData.baseClasses.join('|')}`, - name: nodeData.name, - label: nodeData.type, - type: nodeData.baseClasses.join(' | ') + if (nodeData.outputs && nodeData.outputs.length) { + const options = [] + for (let j = 0; j < nodeData.outputs.length; j += 1) { + let baseClasses = '' + let type = '' + + if (whitelistTypes.includes(nodeData.outputs[j].type)) { + baseClasses = nodeData.outputs[j].type + type = nodeData.outputs[j].type + } else { + baseClasses = nodeData.baseClasses.join('|') + type = nodeData.baseClasses.join(' | ') + } + + const newOutputOption = { + id: `${newNodeId}-output-${nodeData.outputs[j].name}-${baseClasses}`, + name: nodeData.outputs[j].name, + label: nodeData.outputs[j].label, + type + } + options.push(newOutputOption) + } + const newOutput = { + name: 'output', + label: 'Output', + type: 'options', + options, + default: nodeData.outputs[0].name + } + outputAnchors.push(newOutput) + } else { + const newOutput = { + id: `${newNodeId}-output-${nodeData.name}-${nodeData.baseClasses.join('|')}`, + name: nodeData.name, + label: nodeData.type, + type: nodeData.baseClasses.join(' | ') + } + outputAnchors.push(newOutput) } - outputAnchors.push(newOutput) } - nodeData.id = newNodeId - nodeData.inputAnchors = inputAnchors - nodeData.outputAnchors = outputAnchors - - /* - Initial inputs = [ + /* Initial + inputs = [ { - label: 'field_label', - name: 'field' + label: 'field_label_1', + name: 'string' + }, + { + label: 'field_label_2', + name: 'CustomType' } ] - // Turn into inputs object with default values - Converted inputs = { 
'field': 'defaultvalue' } + => Convert to inputs, inputParams, inputAnchors + + => inputs = { 'field': 'defaultvalue' } // Turn into inputs object with default values - // Move remaining inputs that are not part of inputAnchors to inputParams - inputParams = [ - { - label: 'field_label', - name: 'field' - } - ] + => // For inputs that are part of whitelistTypes + inputParams = [ + { + label: 'field_label_1', + name: 'string' + } + ] + + => // For inputs that are not part of whitelistTypes + inputAnchors = [ + { + label: 'field_label_2', + name: 'CustomType' + } + ] */ if (nodeData.inputs) { - nodeData.inputParams = nodeData.inputs.filter(({ name }) => !nodeData.inputAnchors.some((exclude) => exclude.name === name)) - nodeData.inputs = initializeNodeData(nodeData.inputs) + nodeData.inputAnchors = inputAnchors + nodeData.inputParams = inputParams + nodeData.inputs = initializeDefaultNodeData(nodeData.inputs) } else { + nodeData.inputAnchors = [] nodeData.inputParams = [] nodeData.inputs = {} } + if (nodeData.outputs) { + nodeData.outputs = initializeDefaultNodeData(outputAnchors) + } else { + nodeData.outputs = {} + } + + nodeData.outputAnchors = outputAnchors + nodeData.id = newNodeId + return nodeData } @@ -133,7 +178,9 @@ export const isValidConnection = (connection, reactFlowInstance) => { return true } } else { - const targetNodeInputAnchor = targetNode.data.inputAnchors.find((ancr) => ancr.id === targetHandle) + const targetNodeInputAnchor = + targetNode.data.inputAnchors.find((ancr) => ancr.id === targetHandle) || + targetNode.data.inputParams.find((ancr) => ancr.id === targetHandle) if ( (targetNodeInputAnchor && !targetNodeInputAnchor?.list && @@ -144,7 +191,6 @@ export const isValidConnection = (connection, reactFlowInstance) => { } } } - return false } @@ -200,6 +246,7 @@ export const generateExportFlowData = (flowData) => { inputAnchors: node.data.inputAnchors, inputs: {}, outputAnchors: node.data.outputAnchors, + outputs: node.data.outputs, selected: false } @@ -225,11 +272,16 @@ export const generateExportFlowData = (flowData) => { return exportJson } -export const copyToClipboard = (e) => { - const src = e.src - if (Array.isArray(src) || typeof src === 'object') { - navigator.clipboard.writeText(JSON.stringify(src, null, ' ')) - } else { - navigator.clipboard.writeText(src) +export const getAvailableNodesForVariable = (nodes, edges, target, targetHandle) => { + // example edge id = "llmChain_0-llmChain_0-output-outputPrediction-string-llmChain_1-llmChain_1-input-promptValues-string" + // {source} -{sourceHandle} -{target} -{targetHandle} + const parentNodes = [] + const inputEdges = edges.filter((edg) => edg.target === target && edg.targetHandle === targetHandle) + if (inputEdges && inputEdges.length) { + for (let j = 0; j < inputEdges.length; j += 1) { + const node = nodes.find((nd) => nd.id === inputEdges[j].source) + parentNodes.push(node) + } } + return parentNodes } diff --git a/packages/ui/src/views/canvas/CanvasNode.js b/packages/ui/src/views/canvas/CanvasNode.js index 1d4735211..75e2f7dae 100644 --- a/packages/ui/src/views/canvas/CanvasNode.js +++ b/packages/ui/src/views/canvas/CanvasNode.js @@ -12,7 +12,7 @@ import NodeOutputHandler from './NodeOutputHandler' // const import { baseURL } from 'store/constant' -import { IconTrash } from '@tabler/icons' +import { IconTrash, IconCopy } from '@tabler/icons' import { flowContext } from 'store/context/ReactFlowContext' const CardWrapper = styled(MainCard)(({ theme }) => ({ @@ -33,7 +33,7 @@ const CardWrapper = 
styled(MainCard)(({ theme }) => ({ const CanvasNode = ({ data }) => { const theme = useTheme() - const { deleteNode } = useContext(flowContext) + const { deleteNode, duplicateNode } = useContext(flowContext) return ( <> @@ -76,10 +76,22 @@ const CanvasNode = ({ data }) => {
{ + duplicateNode(data.id) + }} + sx={{ height: 35, width: 35, '&:hover': { color: theme?.palette.primary.main } }} + color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'} + > + + + { deleteNode(data.id) }} - sx={{ height: 35, width: 35, mr: 1 }} + sx={{ height: 35, width: 35, mr: 1, '&:hover': { color: 'red' } }} + color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'} > diff --git a/packages/ui/src/views/canvas/NodeInputHandler.js b/packages/ui/src/views/canvas/NodeInputHandler.js index 019cddeb9..4698f572d 100644 --- a/packages/ui/src/views/canvas/NodeInputHandler.js +++ b/packages/ui/src/views/canvas/NodeInputHandler.js @@ -4,13 +4,16 @@ import { useEffect, useRef, useState, useContext } from 'react' // material-ui import { useTheme, styled } from '@mui/material/styles' -import { Box, Typography, Tooltip } from '@mui/material' +import { Box, Typography, Tooltip, IconButton } from '@mui/material' import { tooltipClasses } from '@mui/material/Tooltip' +import { IconArrowsMaximize } from '@tabler/icons' + +// project import import { Dropdown } from 'ui-component/dropdown/Dropdown' import { Input } from 'ui-component/input/Input' import { File } from 'ui-component/file/File' import { flowContext } from 'store/context/ReactFlowContext' -import { isValidConnection } from 'utils/genericHelper' +import { isValidConnection, getAvailableNodesForVariable } from 'utils/genericHelper' const CustomWidthTooltip = styled(({ className, ...props }) => )({ [`& .${tooltipClasses.tooltip}`]: { @@ -23,9 +26,35 @@ const CustomWidthTooltip = styled(({ className, ...props }) => { const theme = useTheme() const ref = useRef(null) + const { reactFlowInstance } = useContext(flowContext) const updateNodeInternals = useUpdateNodeInternals() const [position, setPosition] = useState(0) - const { reactFlowInstance } = useContext(flowContext) + const [showExpandDialog, setShowExpandDialog] = useState(false) + const [expandDialogProps, setExpandDialogProps] = useState({}) + + const onExpandDialogClicked = (value, inputParam) => { + const dialogProp = { + value, + inputParam, + disabled, + confirmButtonName: 'Save', + cancelButtonName: 'Cancel' + } + + if (!disabled) { + const nodes = reactFlowInstance.getNodes() + const edges = reactFlowInstance.getEdges() + const nodesForVariable = inputParam.acceptVariable ? getAvailableNodesForVariable(nodes, edges, data.id, inputParam.id) : [] + dialogProp.availableNodesForVariable = nodesForVariable + } + setExpandDialogProps(dialogProp) + setShowExpandDialog(true) + } + + const onExpandDialogSave = (newValue, inputParamName) => { + setShowExpandDialog(false) + data.inputs[inputParamName] = newValue + } useEffect(() => { if (ref.current && ref.current.offsetTop && ref.current.clientHeight) { @@ -68,11 +97,47 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false }) = {inputParam && ( <> + {inputParam.acceptVariable && ( + + isValidConnection(connection, reactFlowInstance)} + style={{ + height: 10, + width: 10, + backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary, + top: position + }} + /> + + )} - - {inputParam.label} - {!inputParam.optional &&  *} - +
+ + {inputParam.label} + {!inputParam.optional &&  *} + +
+ {inputParam.type === 'string' && inputParam.rows && ( + + onExpandDialogClicked(data.inputs[inputParam.name] ?? inputParam.default ?? '', inputParam) + } + > + + + )} +
{inputParam.type === 'file' && ( (data.inputs[inputParam.name] = newValue)} value={data.inputs[inputParam.name] ?? inputParam.default ?? ''} + showDialog={showExpandDialog} + dialogProps={expandDialogProps} + onDialogCancel={() => setShowExpandDialog(false)} + onDialogConfirm={(newValue, inputParamName) => onExpandDialogSave(newValue, inputParamName)} /> )} {inputParam.type === 'options' && ( diff --git a/packages/ui/src/views/canvas/NodeOutputHandler.js b/packages/ui/src/views/canvas/NodeOutputHandler.js index 62babb7ba..c5fc1345d 100644 --- a/packages/ui/src/views/canvas/NodeOutputHandler.js +++ b/packages/ui/src/views/canvas/NodeOutputHandler.js @@ -8,6 +8,7 @@ import { Box, Typography, Tooltip } from '@mui/material' import { tooltipClasses } from '@mui/material/Tooltip' import { flowContext } from 'store/context/ReactFlowContext' import { isValidConnection } from 'utils/genericHelper' +import { Dropdown } from 'ui-component/dropdown/Dropdown' const CustomWidthTooltip = styled(({ className, ...props }) => )({ [`& .${tooltipClasses.tooltip}`]: { @@ -17,11 +18,12 @@ const CustomWidthTooltip = styled(({ className, ...props }) => { +const NodeOutputHandler = ({ outputAnchor, data, disabled = false }) => { const theme = useTheme() const ref = useRef(null) const updateNodeInternals = useUpdateNodeInternals() const [position, setPosition] = useState(0) + const [dropdownValue, setDropdownValue] = useState(null) const { reactFlowInstance } = useContext(flowContext) useEffect(() => { @@ -39,33 +41,82 @@ const NodeOutputHandler = ({ outputAnchor, data }) => { }, 0) }, [data.id, position, updateNodeInternals]) + useEffect(() => { + if (dropdownValue) { + setTimeout(() => { + updateNodeInternals(data.id) + }, 0) + } + }, [data.id, dropdownValue, updateNodeInternals]) + return (
- - isValidConnection(connection, reactFlowInstance)} - style={{ - height: 10, - width: 10, - backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary, - top: position - }} - /> - - - {outputAnchor.label} - + {outputAnchor.type !== 'options' && !outputAnchor.options && ( + <> + + isValidConnection(connection, reactFlowInstance)} + style={{ + height: 10, + width: 10, + backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary, + top: position + }} + /> + + + {outputAnchor.label} + + + )} + {outputAnchor.type === 'options' && outputAnchor.options && outputAnchor.options.length > 0 && ( + <> + opt.name === data.outputs?.[outputAnchor.name])?.type ?? outputAnchor.type + } + > + opt.name === data.outputs?.[outputAnchor.name])?.id ?? ''} + isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)} + style={{ + height: 10, + width: 10, + backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary, + top: position + }} + /> + + + { + setDropdownValue(newValue) + data.outputs[outputAnchor.name] = newValue + }} + value={data.outputs[outputAnchor.name] ?? outputAnchor.default ?? 'choose an option'} + /> + + + )}
) } NodeOutputHandler.propTypes = { outputAnchor: PropTypes.object, - data: PropTypes.object + data: PropTypes.object, + disabled: PropTypes.bool } export default NodeOutputHandler diff --git a/packages/ui/src/views/canvas/index.js b/packages/ui/src/views/canvas/index.js index edbfbacc3..e42f0ab82 100644 --- a/packages/ui/src/views/canvas/index.js +++ b/packages/ui/src/views/canvas/index.js @@ -108,10 +108,14 @@ const Canvas = () => { setTimeout(() => setDirty(), 0) let value const inputAnchor = node.data.inputAnchors.find((ancr) => ancr.name === targetInput) + const inputParam = node.data.inputParams.find((param) => param.name === targetInput) + if (inputAnchor && inputAnchor.list) { const newValues = node.data.inputs[targetInput] || [] newValues.push(`{{${sourceNodeId}.data.instance}}`) value = newValues + } else if (inputParam && inputParam.acceptVariable) { + value = node.data.inputs[targetInput] || '' } else { value = `{{${sourceNodeId}.data.instance}}` } diff --git a/packages/ui/src/views/marketplaces/MarketplaceCanvasNode.js b/packages/ui/src/views/marketplaces/MarketplaceCanvasNode.js index b48de32ed..d27a7df4a 100644 --- a/packages/ui/src/views/marketplaces/MarketplaceCanvasNode.js +++ b/packages/ui/src/views/marketplaces/MarketplaceCanvasNode.js @@ -88,7 +88,7 @@ const MarketplaceCanvasNode = ({ data }) => { )} {data.inputAnchors.map((inputAnchor, index) => ( - + ))} {data.inputParams.map((inputParam, index) => ( @@ -108,7 +108,7 @@ const MarketplaceCanvasNode = ({ data }) => { {data.outputAnchors.map((outputAnchor, index) => ( - + ))}
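Note on the reworked initNode in packages/ui/src/utils/genericHelper.js: when a node definition declares its own outputs, the single class-based output anchor is replaced by one 'options'-type anchor whose entries are built per declared output, and nodeData.outputs is then seeded from that anchor's default via initializeDefaultNodeData. A minimal sketch of the resulting shape, assuming a hypothetical node someNode_0 with one class-based output and one whitelisted string output; the ids, labels and base classes below are illustrative assumptions, not values from this patch:

// Sketch only: shape produced by initNode for a node that declares outputs.
const outputAnchors = [
    {
        name: 'output',
        label: 'Output',
        type: 'options',
        options: [
            // non-whitelisted output: falls back to the node's baseClasses for its id suffix and type
            { id: 'someNode_0-output-someNode-SomeChain|BaseChain', name: 'someNode', label: 'Some Node', type: 'SomeChain | BaseChain' },
            // whitelisted output (e.g. string): keeps its declared type as-is
            { id: 'someNode_0-output-outputPrediction-string', name: 'outputPrediction', label: 'Output Prediction', type: 'string' }
        ],
        default: 'someNode' // first declared output
    }
]

// initializeDefaultNodeData(outputAnchors) then seeds the node's outputs with that default,
// so the dropdown rendered by NodeOutputHandler starts on the first declared output:
// nodeData.outputs === { output: 'someNode' }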
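Usage note for getAvailableNodesForVariable: NodeInputHandler calls it when an acceptVariable input is expanded, so the dialog can list only the upstream nodes wired into that specific input handle. A minimal sketch, reusing the example edge id documented in the helper's own comment; the stubbed node payloads are placeholders:

import { getAvailableNodesForVariable } from 'utils/genericHelper'

// Sketch only: follows the {source}-{sourceHandle}-{target}-{targetHandle} edge convention.
const nodes = [
    { id: 'llmChain_0', data: { /* ...node data... */ } },
    { id: 'llmChain_1', data: { /* ...node data... */ } }
]
const edges = [
    {
        id: 'llmChain_0-llmChain_0-output-outputPrediction-string-llmChain_1-llmChain_1-input-promptValues-string',
        source: 'llmChain_0',
        sourceHandle: 'llmChain_0-output-outputPrediction-string',
        target: 'llmChain_1',
        targetHandle: 'llmChain_1-input-promptValues-string'
    }
]

// Returns [nodes[0]]: llmChain_0 is the only node wired into llmChain_1's promptValues input,
// so the expand dialog (via dialogProp.availableNodesForVariable) can offer it as a variable.
const parentNodes = getAvailableNodesForVariable(nodes, edges, 'llmChain_1', 'llmChain_1-input-promptValues-string')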
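Note on the options-type branch in NodeOutputHandler: the entry selected in data.outputs decides both the Handle id (and therefore the sourceHandle of any edge drawn from that output) and the type string shown in the tooltip. A small standalone restatement of that lookup, assuming the same outputAnchor and data shapes as the component; the helper name is invented for illustration and is not part of the patch:

// Sketch only: mirrors the ternaries in the options-type JSX above.
const getActiveOutputHandle = (outputAnchor, data) => {
    const selectedName = data.outputs?.[outputAnchor.name] // e.g. 'outputPrediction'
    const selected = (outputAnchor.options ?? []).find((opt) => opt.name === selectedName)
    return {
        id: selected?.id ?? '', // used as the Handle id, hence as the sourceHandle of new edges
        type: selected?.type ?? outputAnchor.type // shown in the tooltip for the handle
    }
}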
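Note on the onConnect change in packages/ui/src/views/canvas/index.js: the value written into the target node's inputs depends on what kind of target the edge lands on. A rough sketch of that resolution pulled into a helper for readability; the helper name is invented and this is not a verbatim excerpt of the canvas code:

// Sketch only: connect-time value resolution for the target input.
const resolveConnectedValue = (node, targetInput, sourceNodeId) => {
    const inputAnchor = node.data.inputAnchors.find((ancr) => ancr.name === targetInput)
    const inputParam = node.data.inputParams.find((param) => param.name === targetInput)

    if (inputAnchor && inputAnchor.list) {
        // list anchors accumulate one instance reference per incoming connection
        const newValues = node.data.inputs[targetInput] || []
        newValues.push(`{{${sourceNodeId}.data.instance}}`)
        return newValues
    }
    if (inputParam && inputParam.acceptVariable) {
        // acceptVariable params keep whatever text is already there; the upstream value is
        // referenced from inside the prompt via the expand dialog instead of being overwritten
        return node.data.inputs[targetInput] || ''
    }
    // plain anchors are replaced with a single instance reference
    return `{{${sourceNodeId}.data.instance}}`
}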