diff --git a/package.json b/package.json index 1d1b2d3e1..af805599b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "1.2.4", + "version": "1.2.5", "private": true, "homepage": "https://flowiseai.com", "workspaces": [ diff --git a/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts b/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts index dfb9b5d5c..5d608c5e2 100644 --- a/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts +++ b/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts @@ -1,7 +1,6 @@ -import { OpenAIChatInput } from 'langchain/llms/openai' import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses } from '../../../src/utils' -import { ChatOpenAI } from 'langchain/chat_models/openai' +import { ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai' class ChatOpenAI_ChatModels implements INode { label: string diff --git a/packages/components/nodes/embeddings/CohereEmbedding/CohereEmbedding.ts b/packages/components/nodes/embeddings/CohereEmbedding/CohereEmbedding.ts index 923cf6c64..344713a48 100644 --- a/packages/components/nodes/embeddings/CohereEmbedding/CohereEmbedding.ts +++ b/packages/components/nodes/embeddings/CohereEmbedding/CohereEmbedding.ts @@ -1,6 +1,6 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses } from '../../../src/utils' -import { CohereEmbeddings } from 'langchain/embeddings/cohere' +import { CohereEmbeddings, CohereEmbeddingsParams } from 'langchain/embeddings/cohere' class CohereEmbedding_Embeddings implements INode { label: string @@ -25,14 +25,42 @@ class CohereEmbedding_Embeddings implements INode { label: 'Cohere API Key', name: 'cohereApiKey', type: 'password' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'embed-english-v2.0', + name: 'embed-english-v2.0' + }, + { + label: 'embed-english-light-v2.0', + name: 'embed-english-light-v2.0' + }, + { + label: 'embed-multilingual-v2.0', + name: 'embed-multilingual-v2.0' + } + ], + default: 'embed-english-v2.0', + optional: true } ] } async init(nodeData: INodeData): Promise<any> { const apiKey = nodeData.inputs?.cohereApiKey as string + const modelName = nodeData.inputs?.modelName as string - const model = new CohereEmbeddings({ apiKey }) + const obj: Partial<CohereEmbeddingsParams> & { apiKey?: string } = { + apiKey + } + + if (modelName) obj.modelName = modelName + + const model = new CohereEmbeddings(obj) return model } } diff --git a/packages/components/nodes/llms/Cohere/Cohere.ts b/packages/components/nodes/llms/Cohere/Cohere.ts new file mode 100644 index 000000000..dc632ec31 --- /dev/null +++ b/packages/components/nodes/llms/Cohere/Cohere.ts @@ -0,0 +1,97 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { Cohere, CohereInput } from 'langchain/llms/cohere' + +class Cohere_LLMs implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Cohere' + this.name = 'cohere' + this.type = 'Cohere' + this.icon = 'cohere.png' + this.category = 'LLMs' + this.description = 'Wrapper around Cohere large language models' + this.baseClasses = [this.type, ...getBaseClasses(Cohere)] + this.inputs = [ + { + label: 'Cohere Api Key', + name: 'cohereApiKey', + type: 'password' + }, + { + label:
'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'command', + name: 'command' + }, + { + label: 'command-light', + name: 'command-light' + }, + { + label: 'command-nightly', + name: 'command-nightly' + }, + { + label: 'command-light-nightly', + name: 'command-light-nightly' + }, + { + label: 'base', + name: 'base' + }, + { + label: 'base-light', + name: 'base-light' + } + ], + default: 'command', + optional: true + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + default: 0.7, + optional: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise<any> { + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const apiKey = nodeData.inputs?.cohereApiKey as string + const maxTokens = nodeData.inputs?.maxTokens as string + + const obj: CohereInput = { + apiKey + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (modelName) obj.model = modelName + if (temperature) obj.temperature = parseFloat(temperature) + + const model = new Cohere(obj) + return model + } +} + +module.exports = { nodeClass: Cohere_LLMs } diff --git a/packages/components/nodes/llms/Cohere/cohere.png b/packages/components/nodes/llms/Cohere/cohere.png new file mode 100644 index 000000000..266adeac2 Binary files /dev/null and b/packages/components/nodes/llms/Cohere/cohere.png differ diff --git a/packages/components/nodes/tools/WebBrowser/WebBrowser.ts b/packages/components/nodes/tools/WebBrowser/WebBrowser.ts new file mode 100644 index 000000000..09478047a --- /dev/null +++ b/packages/components/nodes/tools/WebBrowser/WebBrowser.ts @@ -0,0 +1,47 @@ +import { BaseLanguageModel } from 'langchain/base_language' +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { WebBrowser } from 'langchain/tools/webbrowser' +import { Embeddings } from 'langchain/embeddings/base' + +class WebBrowser_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Web Browser' + this.name = 'webBrowser' + this.type = 'WebBrowser' + this.icon = 'webBrowser.svg' + this.category = 'Tools' + this.description = 'Gives agent the ability to visit a website and extract information' + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + } + ] + this.baseClasses = [this.type, ...getBaseClasses(WebBrowser)] + } + + async init(nodeData: INodeData): Promise<any> { + const model = nodeData.inputs?.model as BaseLanguageModel + const embeddings = nodeData.inputs?.embeddings as Embeddings + + return new WebBrowser({ model, embeddings }) + } +} + +module.exports = { nodeClass: WebBrowser_Tools } diff --git a/packages/components/nodes/tools/WebBrowser/webBrowser.svg b/packages/components/nodes/tools/WebBrowser/webBrowser.svg new file mode 100644 index 000000000..01eea4f2a --- /dev/null +++ b/packages/components/nodes/tools/WebBrowser/webBrowser.svg @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/package.json b/packages/components/package.json index b06b81441..0e13c4fb3 100644 --- a/packages/components/package.json +++ b/packages/components/package.json @@ -1,6
+1,6 @@ { "name": "flowise-components", - "version": "1.2.5", + "version": "1.2.6", "description": "Flowiseai Components", "main": "dist/src/index", "types": "dist/src/index.d.ts", @@ -29,7 +29,7 @@ "express": "^4.17.3", "form-data": "^4.0.0", "graphql": "^16.6.0", - "langchain": "^0.0.66", + "langchain": "^0.0.73", "mammoth": "^1.5.1", "moment": "^2.29.3", "node-fetch": "2", diff --git a/packages/server/marketplaces/WebBrowser.json b/packages/server/marketplaces/WebBrowser.json new file mode 100644 index 000000000..f1a3638d2 --- /dev/null +++ b/packages/server/marketplaces/WebBrowser.json @@ -0,0 +1,590 @@ +{ + "description": "Conversational Agent with ability to visit a website and extract information", + "nodes": [ + { + "width": 300, + "height": 383, + "id": "conversationalAgent_0", + "position": { + "x": 1527.4605987432717, + "y": 242.32866622482635 + }, + "type": "customNode", + "data": { + "id": "conversationalAgent_0", + "label": "Conversational Agent", + "name": "conversationalAgent", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], + "category": "Agents", + "description": "Conversational agent for a chat model. It will utilize chat specific prompts", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-systemMessage-string" + }, + { + "label": "Human Message", + "name": "humanMessage", + "type": "string", + "rows": 4, + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-humanMessage-string" + } + ], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "conversationalAgent_0-input-tools-Tool" + }, + { + "label": "Chat Model", + "name": "model", + "type": "BaseChatModel", + "id": "conversationalAgent_0-input-model-BaseChatModel" + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseChatMemory", + "id": "conversationalAgent_0-input-memory-BaseChatMemory" + } + ], + "inputs": { + "tools": ["{{webBrowser_0.data.instance}}"], + "model": "{{chatOpenAI_0.data.instance}}", + "memory": "{{bufferMemory_0.data.instance}}", + "systemMessage": "", + "humanMessage": "" + }, + "outputAnchors": [ + { + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|BaseLangChain", + "name": "conversationalAgent", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1527.4605987432717, + "y": 242.32866622482635 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_0", + "position": { + "x": 348.0817836845733, + "y": -86.56099395751443 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": 
"gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 348.0817836845733, + "y": -86.56099395751443 + }, + "dragging": false + }, + { + "width": 300, + "height": 376, + "id": "bufferMemory_0", + "position": { + "x": 15.045898260926037, + "y": 114.13407401971622 + }, + "type": "customNode", + "data": { + "id": "bufferMemory_0", + "label": "Buffer Memory", + "name": "bufferMemory", + "type": "BufferMemory", + "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], + "category": "Memory", + "description": "Remembers previous conversational back and forths directly", + "inputParams": [ + { + "label": "Memory Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "id": "bufferMemory_0-input-memoryKey-string" + }, + { + "label": "Input Key", + "name": "inputKey", + "type": "string", + "default": "input", + "id": "bufferMemory_0-input-inputKey-string" + } + ], + "inputAnchors": [], + "inputs": { + "memoryKey": "chat_history", + "inputKey": "input" + }, + "outputAnchors": [ + { + "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "name": "bufferMemory", + "label": "BufferMemory", + "type": "BufferMemory | BaseChatMemory | BaseMemory" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 15.045898260926037, + "y": 114.13407401971622 + }, + "dragging": false + }, + { + "width": 300, + "height": 330, + "id": "openAIEmbeddings_0", + "position": { + "x": 693.9266260641734, + "y": 37.098856540087496 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_0", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": 
["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_0-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-timeout-number" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 693.9266260641734, + "y": 37.098856540087496 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_1", + "position": { + "x": 691.5132411896494, + "y": -533.1696369549378 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-timeout-number" + } + ], + "inputAnchors": [], + "inputs": { + 
"modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 691.5132411896494, + "y": -533.1696369549378 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "webBrowser_0", + "position": { + "x": 1091.0866823400172, + "y": -16.43806989958216 + }, + "type": "customNode", + "data": { + "id": "webBrowser_0", + "label": "Web Browser", + "name": "webBrowser", + "type": "WebBrowser", + "baseClasses": ["WebBrowser", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Gives agent the ability to visit a website and extract information", + "inputParams": [], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "webBrowser_0-input-model-BaseLanguageModel" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "webBrowser_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "model": "{{chatOpenAI_1.data.instance}}", + "embeddings": "{{openAIEmbeddings_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain", + "name": "webBrowser", + "label": "WebBrowser", + "type": "WebBrowser | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1091.0866823400172, + "y": -16.43806989958216 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "bufferMemory_0", + "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory", + "type": "buttonedge", + "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "webBrowser_0", + "targetHandle": "webBrowser_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-webBrowser_0-webBrowser_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "webBrowser_0", + "targetHandle": "webBrowser_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-webBrowser_0-webBrowser_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-model-BaseChatModel", + "type": "buttonedge", + "id": 
"chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel", + "data": { + "label": "" + } + }, + { + "source": "webBrowser_0", + "sourceHandle": "webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "webBrowser_0-webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/package.json b/packages/server/package.json index c30ac8c1f..2886e6e83 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "1.2.4", + "version": "1.2.5", "description": "Flowiseai Server", "main": "dist/index", "types": "dist/index.d.ts", diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index 8f668e124..ab6a3c84f 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -404,15 +404,20 @@ export const isSameOverrideConfig = ( existingOverrideConfig?: ICommonObject, newOverrideConfig?: ICommonObject ): boolean => { + // Skip check if its internal call if (isInternal) return true + // If existing and new overrideconfig are the same if ( existingOverrideConfig && Object.keys(existingOverrideConfig).length && newOverrideConfig && Object.keys(newOverrideConfig).length && JSON.stringify(existingOverrideConfig) === JSON.stringify(newOverrideConfig) - ) + ) { return true + } + // If there is no existing and new overrideconfig + if (!existingOverrideConfig && !newOverrideConfig) return true return false } diff --git a/packages/ui/package.json b/packages/ui/package.json index 1e22bca3e..44f0707ad 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "flowise-ui", - "version": "1.2.3", + "version": "1.2.4", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://flowiseai.com", "author": { diff --git a/packages/ui/src/assets/images/embed.svg b/packages/ui/src/assets/images/embed.svg new file mode 100644 index 000000000..d20ceab0c --- /dev/null +++ b/packages/ui/src/assets/images/embed.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/ui-component/dialog/APICodeDialog.js b/packages/ui/src/ui-component/dialog/APICodeDialog.js index b744b9193..e59d9a3d5 100644 --- a/packages/ui/src/ui-component/dialog/APICodeDialog.js +++ b/packages/ui/src/ui-component/dialog/APICodeDialog.js @@ -18,6 +18,7 @@ import { SET_CHATFLOW } from 'store/actions' import pythonSVG from 'assets/images/python.svg' import javascriptSVG from 'assets/images/javascript.svg' import cURLSVG from 'assets/images/cURL.svg' +import EmbedSVG from 'assets/images/embed.svg' // API import apiKeyApi from 'api/apikey' @@ -118,11 +119,21 @@ const getConfigExamplesForCurl = (configData, bodyType) => { return finalStr } +const embedCode = (chatflowid) => { + return `` +} + const APICodeDialog = ({ show, dialogProps, onCancel }) => { const portalElement = document.getElementById('portal') const navigate = useNavigate() const dispatch = useDispatch() - const codes = ['Python', 'JavaScript', 'cURL'] + const codes = ['Embed', 'Python', 'JavaScript', 'cURL'] const [value, setValue] = useState(0) const [keyOptions, setKeyOptions] = useState([]) const [apiKeys, 
setAPIKeys] = useState([]) @@ -199,6 +210,8 @@ query({"question": "Hey, how are you?"}).then((response) => { return `curl ${baseURL}/api/v1/prediction/${dialogProps.chatflowid} \\ -X POST \\ -d '{"question": "Hey, how are you?"}'` + } else if (codeLang === 'Embed') { + return embedCode(dialogProps.chatflowid) } return '' } @@ -241,6 +254,8 @@ query({"question": "Hey, how are you?"}).then((response) => { -X POST \\ -d '{"question": "Hey, how are you?"}' -H "Authorization: Bearer ${selectedApiKey?.apiKey}"` + } else if (codeLang === 'Embed') { + return embedCode(dialogProps.chatflowid) } return '' } @@ -248,7 +263,7 @@ query({"question": "Hey, how are you?"}).then((response) => { const getLang = (codeLang) => { if (codeLang === 'Python') { return 'python' - } else if (codeLang === 'JavaScript') { + } else if (codeLang === 'JavaScript' || codeLang === 'Embed') { return 'javascript' } else if (codeLang === 'cURL') { return 'bash' @@ -261,6 +276,8 @@ query({"question": "Hey, how are you?"}).then((response) => { return pythonSVG } else if (codeLang === 'JavaScript') { return javascriptSVG + } else if (codeLang === 'Embed') { + return EmbedSVG } else if (codeLang === 'cURL') { return cURLSVG } @@ -510,11 +527,7 @@ query({ {codes.map((codeLang, index) => ( + code } iconPosition='start' key={index} @@ -524,19 +537,29 @@ query({ ))} -
- onApiKeySelected(newValue)} - value={dialogProps.chatflowApiKeyId ?? chatflowApiKeyId ?? 'Choose an API key'} - /> -
+ {value !== 0 && ( +
+ onApiKeySelected(newValue)} + value={dialogProps.chatflowApiKeyId ?? chatflowApiKeyId ?? 'Choose an API key'} + /> +
+ )}
{codes.map((codeLang, index) => ( + {value === 0 && ( + <> + + Paste this anywhere in the {`<body>`} tag of your html file + +
+ + )} - - {checkboxVal && getConfigApi.data && getConfigApi.data.length > 0 && ( + {value !== 0 && } + {value !== 0 && checkboxVal && getConfigApi.data && getConfigApi.data.length > 0 && ( <> { - if (chatflow.id) handleSaveFlow(chatflow.name) + if (chatflow.id) handleSaveFlow(flowName) else setFlowDialogOpen(true) }
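For reference, a minimal usage sketch of the langchain Cohere wrapper that the new Cohere node's init() constructs, assuming langchain ^0.0.73 as pinned above; the COHERE_API_KEY environment variable and the prompt text are illustrative assumptions, not values taken from this changeset.

import { Cohere } from 'langchain/llms/cohere'

async function main(): Promise<void> {
    // Mirrors the CohereInput fields wired up by the node's init():
    // apiKey, model, temperature and maxTokens.
    const model = new Cohere({
        apiKey: process.env.COHERE_API_KEY, // assumed env var, illustrative only
        model: 'command',
        temperature: 0.7,
        maxTokens: 256
    })

    // A downstream chain or agent would invoke the model the same way.
    const answer = await model.call('List three practical uses of large language models.')
    console.log(answer)
}

main()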