diff --git a/package.json b/package.json
index 2be9282be..1866f7abe 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "flowise",
- "version": "1.4.2",
+ "version": "1.4.3",
"private": true,
"homepage": "https://flowiseai.com",
"workspaces": [
diff --git a/packages/components/credentials/LangfuseApi.credential.ts b/packages/components/credentials/LangfuseApi.credential.ts
index 452ca9897..923af5177 100644
--- a/packages/components/credentials/LangfuseApi.credential.ts
+++ b/packages/components/credentials/LangfuseApi.credential.ts
@@ -12,7 +12,7 @@ class LangfuseApi implements INodeCredential {
this.name = 'langfuseApi'
this.version = 1.0
this.description =
- 'Refer to official guide on how to get API key on Langfuse'
+ 'Refer to integration guide on how to get API keys on Langfuse'
this.inputs = [
{
label: 'Secret Key',
diff --git a/packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts b/packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts
index 4a908d7fe..7b71cb5f8 100644
--- a/packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts
+++ b/packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts
@@ -21,7 +21,7 @@ class ConversationalRetrievalAgent_Agents implements INode {
constructor() {
this.label = 'Conversational Retrieval Agent'
this.name = 'conversationalRetrievalAgent'
- this.version = 1.0
+ this.version = 2.0
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'agent.svg'
@@ -40,9 +40,9 @@ class ConversationalRetrievalAgent_Agents implements INode {
type: 'BaseChatMemory'
},
{
- label: 'OpenAI Chat Model',
+ label: 'OpenAI/Azure Chat Model',
name: 'model',
- type: 'ChatOpenAI'
+ type: 'ChatOpenAI | AzureChatOpenAI'
},
{
label: 'System Message',
diff --git a/packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts b/packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts
index c920c399e..ce6f576fc 100644
--- a/packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts
+++ b/packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts
@@ -20,11 +20,11 @@ class OpenAIFunctionAgent_Agents implements INode {
constructor() {
this.label = 'OpenAI Function Agent'
this.name = 'openAIFunctionAgent'
- this.version = 1.0
+ this.version = 2.0
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'openai.png'
- this.description = `An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call`
+ this.description = `An agent that uses Function Calling to pick the tool and args to call`
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
this.inputs = [
{
@@ -39,11 +39,9 @@ class OpenAIFunctionAgent_Agents implements INode {
type: 'BaseChatMemory'
},
{
- label: 'OpenAI Chat Model',
+ label: 'OpenAI/Azure Chat Model',
name: 'model',
- description:
- 'Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer docs for more info',
- type: 'BaseChatModel'
+ type: 'ChatOpenAI | AzureChatOpenAI'
},
{
label: 'System Message',
diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts
index ee532a279..fd398151a 100644
--- a/packages/components/nodes/chains/LLMChain/LLMChain.ts
+++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts
@@ -1,7 +1,7 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
-import { BaseLanguageModel } from 'langchain/base_language'
+import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { formatResponse, injectOutputParser } from '../../outputparsers/OutputParserHelpers'
@@ -141,7 +141,7 @@ class LLMChain_Chains implements INode {
const runPrediction = async (
inputVariables: string[],
- chain: LLMChain,
+ chain: LLMChain<string, BaseLanguageModel<any, BaseLanguageModelCallOptions>>,
input: string,
promptValuesRaw: ICommonObject | undefined,
options: ICommonObject,
@@ -164,7 +164,7 @@ const runPrediction = async (
if (moderations && moderations.length > 0) {
try {
// Use the output of the moderation chain as input for the LLM chain
- input = await checkInputs(moderations, chain.llm, input)
+ input = await checkInputs(moderations, input)
} catch (e) {
await new Promise((resolve) => setTimeout(resolve, 500))
streamResponse(isStreaming, e.message, socketIO, socketIOClientId)
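For orientation, a minimal sketch of a chain that fits the annotated `runPrediction` parameter above, assuming langchain ^0.0.196 (model and prompt are illustrative, not part of this change):

```ts
import { LLMChain } from 'langchain/chains'
import { PromptTemplate } from 'langchain/prompts'
import { OpenAI } from 'langchain/llms/openai'

// The explicit generics on runPrediction pin the chain's model type to
// BaseLanguageModel with its call options, matching the widened import above.
const prompt = PromptTemplate.fromTemplate('Summarize: {text}')
const chain = new LLMChain({ llm: new OpenAI({ temperature: 0 }), prompt })

// Moderation now runs on the raw input alone; the llm argument was dropped:
// input = await checkInputs(moderations, input)
```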
diff --git a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts
index ade46ab94..956fcdb33 100644
--- a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts
+++ b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts
@@ -27,7 +27,7 @@ class AWSChatBedrock_ChatModels implements INode {
constructor() {
this.label = 'AWS Bedrock'
this.name = 'awsChatBedrock'
- this.version = 2.0
+ this.version = 3.0
this.type = 'AWSChatBedrock'
this.icon = 'awsBedrock.png'
this.category = 'Chat Models'
@@ -97,7 +97,8 @@ class AWSChatBedrock_ChatModels implements INode {
options: [
{ label: 'anthropic.claude-instant-v1', name: 'anthropic.claude-instant-v1' },
{ label: 'anthropic.claude-v1', name: 'anthropic.claude-v1' },
- { label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' }
+ { label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' },
+ { label: 'meta.llama2-13b-chat-v1', name: 'meta.llama2-13b-chat-v1' }
],
default: 'anthropic.claude-v2'
},
@@ -128,12 +129,14 @@ class AWSChatBedrock_ChatModels implements INode {
const iTemperature = nodeData.inputs?.temperature as string
const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string
const cache = nodeData.inputs?.cache as BaseCache
+ const streaming = nodeData.inputs?.streaming as boolean
const obj: BaseBedrockInput & BaseLLMParams = {
region: iRegion,
model: iModel,
maxTokens: parseInt(iMax_tokens_to_sample, 10),
- temperature: parseFloat(iTemperature)
+ temperature: parseFloat(iTemperature),
+ streaming: streaming ?? true
}
/**
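A small sketch of the defaulting behaviour introduced here: `??` keeps streaming on when the input is absent, so only an explicit `false` opts out (standalone TypeScript, values illustrative):

```ts
// Nullish coalescing: undefined/null fall back to true, an explicit false wins.
const resolveStreaming = (streaming?: boolean): boolean => streaming ?? true

console.log(resolveStreaming(undefined)) // true  -> previous always-on behaviour
console.log(resolveStreaming(true))      // true
console.log(resolveStreaming(false))     // false -> user disabled streaming
```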
diff --git a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts
index ba2aa5e7d..8249d5121 100644
--- a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts
+++ b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts
@@ -18,7 +18,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
constructor() {
this.label = 'AWS Bedrock Embeddings'
this.name = 'AWSBedrockEmbeddings'
- this.version = 1.0
+ this.version = 2.0
this.type = 'AWSBedrockEmbeddings'
this.icon = 'awsBedrock.png'
this.category = 'Embeddings'
@@ -81,7 +81,9 @@ class AWSBedrockEmbedding_Embeddings implements INode {
type: 'options',
options: [
{ label: 'amazon.titan-embed-text-v1', name: 'amazon.titan-embed-text-v1' },
- { label: 'amazon.titan-embed-g1-text-02', name: 'amazon.titan-embed-g1-text-02' }
+ { label: 'amazon.titan-embed-g1-text-02', name: 'amazon.titan-embed-g1-text-02' },
+ { label: 'cohere.embed-english-v3', name: 'cohere.embed-english-v3' },
+ { label: 'cohere.embed-multilingual-v3', name: 'cohere.embed-multilingual-v3' }
],
default: 'amazon.titan-embed-text-v1'
}
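For reference, a hedged sketch of selecting one of the newly listed Cohere models through langchain's Bedrock embeddings wrapper (import path and option names as of langchain ^0.0.196; credential resolution omitted):

```ts
import { BedrockEmbeddings } from 'langchain/embeddings/bedrock'

const embeddings = new BedrockEmbeddings({
    region: 'us-east-1', // illustrative region
    model: 'cohere.embed-multilingual-v3' // one of the options added above
})

// const vector = await embeddings.embedQuery('bonjour le monde')
```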
diff --git a/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts b/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts
index b67219f37..177a32ef9 100644
--- a/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts
+++ b/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts
@@ -27,7 +27,7 @@ class AWSBedrock_LLMs implements INode {
constructor() {
this.label = 'AWS Bedrock'
this.name = 'awsBedrock'
- this.version = 1.2
+ this.version = 2.0
this.type = 'AWSBedrock'
this.icon = 'awsBedrock.png'
this.category = 'LLMs'
@@ -98,6 +98,7 @@ class AWSBedrock_LLMs implements INode {
{ label: 'amazon.titan-tg1-large', name: 'amazon.titan-tg1-large' },
{ label: 'amazon.titan-e1t-medium', name: 'amazon.titan-e1t-medium' },
{ label: 'cohere.command-text-v14', name: 'cohere.command-text-v14' },
+ { label: 'cohere.command-light-text-v14', name: 'cohere.command-light-text-v14' },
{ label: 'ai21.j2-grande-instruct', name: 'ai21.j2-grande-instruct' },
{ label: 'ai21.j2-jumbo-instruct', name: 'ai21.j2-jumbo-instruct' },
{ label: 'ai21.j2-mid', name: 'ai21.j2-mid' },
diff --git a/packages/components/nodes/moderation/Moderation.ts b/packages/components/nodes/moderation/Moderation.ts
index 9c40f55ab..9fd2bfde3 100644
--- a/packages/components/nodes/moderation/Moderation.ts
+++ b/packages/components/nodes/moderation/Moderation.ts
@@ -1,13 +1,12 @@
-import { BaseLanguageModel } from 'langchain/base_language'
import { Server } from 'socket.io'
export abstract class Moderation {
- abstract checkForViolations(llm: BaseLanguageModel, input: string): Promise<string>
+ abstract checkForViolations(input: string): Promise<string>
}
-export const checkInputs = async (inputModerations: Moderation[], llm: BaseLanguageModel, input: string): Promise<string> => {
+export const checkInputs = async (inputModerations: Moderation[], input: string): Promise<string> => {
for (const moderation of inputModerations) {
- input = await moderation.checkForViolations(llm, input)
+ input = await moderation.checkForViolations(input)
}
return input
}
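A minimal sketch of the narrowed contract from an implementer's side, assuming the abstract class above; `BannedWordModeration` is a hypothetical example, not a node that ships with this change:

```ts
import { Moderation, checkInputs } from './Moderation'

class BannedWordModeration extends Moderation {
    // The llm parameter is gone: moderation now sees only the raw user input.
    async checkForViolations(input: string): Promise<string> {
        if (input.includes('forbidden')) throw new Error('Input rejected by moderation')
        return input
    }
}

// Runs every configured moderation in order; the first violation throws.
// const safeInput = await checkInputs([new BannedWordModeration()], userInput)
```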
diff --git a/packages/components/nodes/moderation/OpenAIModeration/OpenAIModeration.ts b/packages/components/nodes/moderation/OpenAIModeration/OpenAIModeration.ts
index 5233f174f..51578630c 100644
--- a/packages/components/nodes/moderation/OpenAIModeration/OpenAIModeration.ts
+++ b/packages/components/nodes/moderation/OpenAIModeration/OpenAIModeration.ts
@@ -1,5 +1,5 @@
-import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses } from '../../../src'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src'
import { Moderation } from '../Moderation'
import { OpenAIModerationRunner } from './OpenAIModerationRunner'
@@ -12,6 +12,7 @@ class OpenAIModeration implements INode {
icon: string
category: string
baseClasses: string[]
+ credential: INodeParams
inputs: INodeParams[]
constructor() {
@@ -23,6 +24,12 @@ class OpenAIModeration implements INode {
this.category = 'Moderation'
this.description = 'Check whether content complies with OpenAI usage policies.'
this.baseClasses = [this.type, ...getBaseClasses(Moderation)]
+ this.credential = {
+ label: 'Connect Credential',
+ name: 'credential',
+ type: 'credential',
+ credentialNames: ['openAIApi']
+ }
this.inputs = [
{
label: 'Error Message',
@@ -35,8 +42,11 @@ class OpenAIModeration implements INode {
]
}
- async init(nodeData: INodeData): Promise<any> {
- const runner = new OpenAIModerationRunner()
+ async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+ const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+ const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData)
+
+ const runner = new OpenAIModerationRunner(openAIApiKey)
const moderationErrorMessage = nodeData.inputs?.moderationErrorMessage as string
if (moderationErrorMessage) runner.setErrorMessage(moderationErrorMessage)
return runner
diff --git a/packages/components/nodes/moderation/OpenAIModeration/OpenAIModerationRunner.ts b/packages/components/nodes/moderation/OpenAIModeration/OpenAIModerationRunner.ts
index c517f419a..3a3ec5502 100644
--- a/packages/components/nodes/moderation/OpenAIModeration/OpenAIModerationRunner.ts
+++ b/packages/components/nodes/moderation/OpenAIModeration/OpenAIModerationRunner.ts
@@ -1,18 +1,21 @@
import { Moderation } from '../Moderation'
-import { BaseLanguageModel } from 'langchain/base_language'
import { OpenAIModerationChain } from 'langchain/chains'
export class OpenAIModerationRunner implements Moderation {
+ private openAIApiKey = ''
private moderationErrorMessage: string = "Text was found that violates OpenAI's content policy."
- async checkForViolations(llm: BaseLanguageModel, input: string): Promise<string> {
- const openAIApiKey = (llm as any).openAIApiKey
- if (!openAIApiKey) {
+ constructor(openAIApiKey: string) {
+ this.openAIApiKey = openAIApiKey
+ }
+
+ async checkForViolations(input: string): Promise<string> {
+ if (!this.openAIApiKey) {
throw Error('OpenAI API key not found')
}
// Create a new instance of the OpenAIModerationChain
const moderation = new OpenAIModerationChain({
- openAIApiKey: openAIApiKey,
+ openAIApiKey: this.openAIApiKey,
throwError: false // If set to true, the call will throw an error when the moderation chain detects violating content. If set to false, violating content will return "Text was found that violates OpenAI's content policy.".
})
// Send the user's input to the moderation chain and wait for the result
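A short usage sketch of the reworked runner: the API key is now injected at construction (resolved from the node credential in `init`) instead of being read off the chat model. The env-var lookup is illustrative:

```ts
import { OpenAIModerationRunner } from './OpenAIModerationRunner'

const runner = new OpenAIModerationRunner(process.env.OPENAI_API_KEY ?? '')
runner.setErrorMessage('Your message was blocked by moderation.')

// Resolves to the input when it passes; throws the configured message otherwise.
// const checked = await runner.checkForViolations('some user input')
```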
diff --git a/packages/components/nodes/moderation/SimplePromptModeration/SimplePromptModerationRunner.ts b/packages/components/nodes/moderation/SimplePromptModeration/SimplePromptModerationRunner.ts
index 7fc251ad4..94967ba2d 100644
--- a/packages/components/nodes/moderation/SimplePromptModeration/SimplePromptModerationRunner.ts
+++ b/packages/components/nodes/moderation/SimplePromptModeration/SimplePromptModerationRunner.ts
@@ -1,5 +1,4 @@
import { Moderation } from '../Moderation'
-import { BaseLanguageModel } from 'langchain/base_language'
export class SimplePromptModerationRunner implements Moderation {
private readonly denyList: string = ''
@@ -13,7 +12,7 @@ export class SimplePromptModerationRunner implements Moderation {
this.moderationErrorMessage = moderationErrorMessage
}
- async checkForViolations(_: BaseLanguageModel, input: string): Promise<string> {
+ async checkForViolations(input: string): Promise<string> {
this.denyList.split('\n').forEach((denyListItem) => {
if (denyListItem && denyListItem !== '' && input.includes(denyListItem)) {
throw Error(this.moderationErrorMessage)
diff --git a/packages/components/nodes/outputparsers/OutputParserHelpers.ts b/packages/components/nodes/outputparsers/OutputParserHelpers.ts
index a94edddd3..8ea77e6bf 100644
--- a/packages/components/nodes/outputparsers/OutputParserHelpers.ts
+++ b/packages/components/nodes/outputparsers/OutputParserHelpers.ts
@@ -1,6 +1,6 @@
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { LLMChain } from 'langchain/chains'
-import { BaseLanguageModel } from 'langchain/base_language'
+import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language'
import { ICommonObject } from '../../src'
import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts'
@@ -15,7 +15,7 @@ export const formatResponse = (response: string | object): string | object => {
export const injectOutputParser = (
outputParser: BaseOutputParser,
- chain: LLMChain,
+ chain: LLMChain<string, BaseLanguageModel<any, BaseLanguageModelCallOptions>>,
promptValues: ICommonObject | undefined = undefined
) => {
if (outputParser && chain.prompt) {
diff --git a/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts b/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts
index 51394613e..620c3af7f 100644
--- a/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts
+++ b/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts
@@ -31,7 +31,8 @@ class InMemoryVectorStore_VectorStores implements INode {
label: 'Document',
name: 'document',
type: 'Document',
- list: true
+ list: true,
+ optional: true
},
{
label: 'Embeddings',
diff --git a/packages/components/package.json b/packages/components/package.json
index 739aa4b26..c5c05e200 100644
--- a/packages/components/package.json
+++ b/packages/components/package.json
@@ -1,6 +1,6 @@
{
"name": "flowise-components",
- "version": "1.4.2",
+ "version": "1.4.3",
"description": "Flowiseai Components",
"main": "dist/src/index",
"types": "dist/src/index.d.ts",
@@ -26,7 +26,8 @@
"@dqbd/tiktoken": "^1.0.7",
"@elastic/elasticsearch": "^8.9.0",
"@getzep/zep-js": "^0.6.3",
- "@gomomento/sdk": "^1.40.2",
+ "@gomomento/sdk": "^1.51.1",
+ "@gomomento/sdk-core": "^1.51.1",
"@google-ai/generativelanguage": "^0.2.1",
"@huggingface/inference": "^2.6.1",
"@notionhq/client": "^2.2.8",
@@ -55,7 +56,7 @@
"html-to-text": "^9.0.5",
"ioredis": "^5.3.2",
"jsdom": "^22.1.0",
- "langchain": "^0.0.165",
+ "langchain": "^0.0.196",
"langfuse-langchain": "^1.0.31",
"langsmith": "^0.0.32",
"linkifyjs": "^4.1.1",
diff --git a/packages/server/marketplaces/chatflows/API Agent OpenAI.json b/packages/server/marketplaces/chatflows/API Agent OpenAI.json
index 5498b4f36..4950a6a66 100644
--- a/packages/server/marketplaces/chatflows/API Agent OpenAI.json
+++ b/packages/server/marketplaces/chatflows/API Agent OpenAI.json
@@ -334,7 +334,7 @@
"id": "openAIFunctionAgent_0",
"label": "OpenAI Function Agent",
"name": "openAIFunctionAgent",
- "version": 1,
+ "version": 2,
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
@@ -365,11 +365,10 @@
"id": "openAIFunctionAgent_0-input-memory-BaseChatMemory"
},
{
- "label": "OpenAI Chat Model",
+ "label": "OpenAI/Azure Chat Model",
"name": "model",
- "description": "Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer docs for more info",
- "type": "BaseChatModel",
- "id": "openAIFunctionAgent_0-input-model-BaseChatModel"
+ "type": "ChatOpenAI | AzureChatOpenAI",
+ "id": "openAIFunctionAgent_0-input-model-ChatOpenAI | AzureChatOpenAI"
}
],
"inputs": {
diff --git a/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json b/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json
index aafc8e8e2..800ae3000 100644
--- a/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json
+++ b/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json
@@ -98,7 +98,7 @@
"data": {
"id": "conversationalRetrievalAgent_0",
"label": "Conversational Retrieval Agent",
- "version": 1,
+ "version": 2,
"name": "conversationalRetrievalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "Runnable"],
@@ -130,10 +130,10 @@
"id": "conversationalRetrievalAgent_0-input-memory-BaseChatMemory"
},
{
- "label": "OpenAI Chat Model",
+ "label": "OpenAI/Azure Chat Model",
"name": "model",
- "type": "ChatOpenAI",
- "id": "conversationalRetrievalAgent_0-input-model-ChatOpenAI"
+ "type": "ChatOpenAI | AzureChatOpenAI",
+ "id": "conversationalRetrievalAgent_0-input-model-ChatOpenAI | AzureChatOpenAI"
}
],
"inputs": {
diff --git a/packages/server/marketplaces/chatflows/OpenAI Agent.json b/packages/server/marketplaces/chatflows/OpenAI Agent.json
index a4944af09..bc27a9fe8 100644
--- a/packages/server/marketplaces/chatflows/OpenAI Agent.json
+++ b/packages/server/marketplaces/chatflows/OpenAI Agent.json
@@ -206,7 +206,7 @@
"id": "openAIFunctionAgent_0",
"label": "OpenAI Function Agent",
"name": "openAIFunctionAgent",
- "version": 1,
+ "version": 2,
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
@@ -237,11 +237,10 @@
"id": "openAIFunctionAgent_0-input-memory-BaseChatMemory"
},
{
- "label": "OpenAI Chat Model",
+ "label": "OpenAI/Azure Chat Model",
"name": "model",
- "description": "Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer docs for more info",
- "type": "BaseChatModel",
- "id": "openAIFunctionAgent_0-input-model-BaseChatModel"
+ "type": "ChatOpenAI | AzureChatOpenAI",
+ "id": "openAIFunctionAgent_0-input-model-ChatOpenAI | AzureChatOpenAI"
}
],
"inputs": {
diff --git a/packages/server/package.json b/packages/server/package.json
index 854dc9a1d..f71c62e0f 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -1,6 +1,6 @@
{
"name": "flowise",
- "version": "1.4.2",
+ "version": "1.4.3",
"description": "Flowiseai Server",
"main": "dist/index",
"types": "dist/index.d.ts",
diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts
index 86d626c44..9dbb695ee 100644
--- a/packages/server/src/utils/index.ts
+++ b/packages/server/src/utils/index.ts
@@ -844,7 +844,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component
*/
export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNodeData: INodeData) => {
const streamAvailableLLMs = {
- 'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama'],
+ 'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock'],
LLMs: ['azureOpenAI', 'openAI', 'ollama']
}
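A hedged sketch of the gate this allow-list feeds; the rest of `isFlowValidForStream` is outside this hunk, so the lookup below is illustrative:

```ts
const streamAvailableLLMs: Record<string, string[]> = {
    'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock'],
    LLMs: ['azureOpenAI', 'openAI', 'ollama']
}

// Streaming is only offered when the flow's ending node is an allow-listed model.
const canStream = (category: string, nodeName: string): boolean =>
    (streamAvailableLLMs[category] ?? []).includes(nodeName)

console.log(canStream('Chat Models', 'awsChatBedrock')) // true, newly enabled here
```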
diff --git a/packages/ui/package.json b/packages/ui/package.json
index 639fb57bb..72ac0a4d8 100644
--- a/packages/ui/package.json
+++ b/packages/ui/package.json
@@ -1,6 +1,6 @@
{
"name": "flowise-ui",
- "version": "1.4.0",
+ "version": "1.4.1",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://flowiseai.com",
"author": {
diff --git a/packages/ui/src/views/canvas/AddNodes.jsx b/packages/ui/src/views/canvas/AddNodes.jsx
index 49d02ef81..c7399e5eb 100644
--- a/packages/ui/src/views/canvas/AddNodes.jsx
+++ b/packages/ui/src/views/canvas/AddNodes.jsx
@@ -68,10 +68,14 @@ const AddNodes = ({ nodesData, node }) => {
else newNodes.push(vsNode)
}
delete obj['Vector Stores']
- obj['Vector Stores;DEPRECATING'] = deprecatingNodes
- accordianCategories['Vector Stores;DEPRECATING'] = isFilter ? true : false
- obj['Vector Stores;NEW'] = newNodes
- accordianCategories['Vector Stores;NEW'] = isFilter ? true : false
+ if (deprecatingNodes.length) {
+ obj['Vector Stores;DEPRECATING'] = deprecatingNodes
+ accordianCategories['Vector Stores;DEPRECATING'] = isFilter ? true : false
+ }
+ if (newNodes.length) {
+ obj['Vector Stores;NEW'] = newNodes
+ accordianCategories['Vector Stores;NEW'] = isFilter ? true : false
+ }
setNodes(obj)
}
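In isolation, the guard added above does the following: the two category keys are only registered when they have members, so empty accordion sections no longer render (plain TypeScript sketch; `accordianCategories` keeps the source's spelling):

```ts
const obj: Record<string, object[]> = {}
const accordianCategories: Record<string, boolean> = {}

const deprecatingNodes: object[] = []                     // nothing deprecated
const newNodes: object[] = [{ name: 'someVectorStore' }]  // illustrative node

if (deprecatingNodes.length) {
    obj['Vector Stores;DEPRECATING'] = deprecatingNodes
    accordianCategories['Vector Stores;DEPRECATING'] = false
}
if (newNodes.length) {
    obj['Vector Stores;NEW'] = newNodes
    accordianCategories['Vector Stores;NEW'] = false
}

console.log(Object.keys(obj)) // ['Vector Stores;NEW'] only
```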