diff --git a/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts b/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts
index 1bc06f4bb..4cb206f53 100644
--- a/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts
+++ b/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts
@@ -114,7 +114,7 @@ class GoogleVertexAI_ChatModels implements INode {
         const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string
         const topP = nodeData.inputs?.topP as string

-        const obj: Partial<GoogleVertexAIChatInput> = {
+        const obj: GoogleVertexAIChatInput = {
             temperature: parseFloat(temperature),
             model: modelName
         }
diff --git a/packages/components/nodes/llms/OpenAI/OpenAI.ts b/packages/components/nodes/llms/OpenAI/OpenAI.ts
index 951d1a706..2960ad2a8 100644
--- a/packages/components/nodes/llms/OpenAI/OpenAI.ts
+++ b/packages/components/nodes/llms/OpenAI/OpenAI.ts
@@ -17,7 +17,7 @@ class OpenAI_LLMs implements INode {
     constructor() {
         this.label = 'OpenAI'
         this.name = 'openAI'
-        this.version = 1.0
+        this.version = 2.0
         this.type = 'OpenAI'
         this.icon = 'openai.png'
         this.category = 'LLMs'
@@ -36,23 +36,19 @@ class OpenAI_LLMs implements INode {
                 type: 'options',
                 options: [
                     {
-                        label: 'text-davinci-003',
-                        name: 'text-davinci-003'
+                        label: 'gpt-3.5-turbo-instruct',
+                        name: 'gpt-3.5-turbo-instruct'
                     },
                     {
-                        label: 'text-davinci-002',
-                        name: 'text-davinci-002'
+                        label: 'babbage-002',
+                        name: 'babbage-002'
                     },
                     {
-                        label: 'text-curie-001',
-                        name: 'text-curie-001'
-                    },
-                    {
-                        label: 'text-babbage-001',
-                        name: 'text-babbage-001'
+                        label: 'davinci-002',
+                        name: 'davinci-002'
                     }
                 ],
-                default: 'text-davinci-003',
+                default: 'gpt-3.5-turbo-instruct',
                 optional: true
             },
             {
diff --git a/packages/components/package.json b/packages/components/package.json
index a3cc77289..94dcbdce4 100644
--- a/packages/components/package.json
+++ b/packages/components/package.json
@@ -42,7 +42,7 @@
         "google-auth-library": "^9.0.0",
         "graphql": "^16.6.0",
         "html-to-text": "^9.0.5",
-        "langchain": "^0.0.147",
+        "langchain": "^0.0.152",
         "langfuse-langchain": "^1.0.14-alpha.0",
         "langsmith": "^0.0.32",
         "linkifyjs": "^4.1.1",
diff --git a/packages/server/marketplaces/chatflows/Multiple VectorDB.json b/packages/server/marketplaces/chatflows/Multiple VectorDB.json
index 101a683b6..ac4643aab 100644
--- a/packages/server/marketplaces/chatflows/Multiple VectorDB.json
+++ b/packages/server/marketplaces/chatflows/Multiple VectorDB.json
@@ -328,7 +328,7 @@
                 "id": "openAI_2",
                 "label": "OpenAI",
                 "name": "openAI",
-                "version": 1,
+                "version": 2,
                 "type": "OpenAI",
                 "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                 "category": "LLMs",
@@ -347,23 +347,19 @@
                         "type": "options",
                         "options": [
                             {
-                                "label": "text-davinci-003",
-                                "name": "text-davinci-003"
+                                "label": "gpt-3.5-turbo-instruct",
+                                "name": "gpt-3.5-turbo-instruct"
                             },
                             {
-                                "label": "text-davinci-002",
-                                "name": "text-davinci-002"
+                                "label": "babbage-002",
+                                "name": "babbage-002"
                             },
                             {
-                                "label": "text-curie-001",
-                                "name": "text-curie-001"
-                            },
-                            {
-                                "label": "text-babbage-001",
-                                "name": "text-babbage-001"
+                                "label": "davinci-002",
+                                "name": "davinci-002"
                             }
                         ],
-                        "default": "text-davinci-003",
+                        "default": "gpt-3.5-turbo-instruct",
                         "optional": true,
                         "id": "openAI_2-input-modelName-options"
                     },
@@ -442,7 +438,7 @@
                 ],
                 "inputAnchors": [],
                 "inputs": {
-                    "modelName": "text-davinci-003",
+                    "modelName": "gpt-3.5-turbo-instruct",
                     "temperature": 0.7,
                     "maxTokens": "",
                     "topP": "",
@@ -743,7 +739,7 @@
                 "id": "openAI_3",
                 "label": "OpenAI",
                 "name": "openAI",
-                "version": 1,
+                "version": 2,
                 "type": "OpenAI",
                 "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                 "category": "LLMs",
@@ -762,23 +758,19 @@
                         "type": "options",
                         "options": [
                             {
-                                "label": "text-davinci-003",
-                                "name": "text-davinci-003"
+                                "label": "gpt-3.5-turbo-instruct",
+                                "name": "gpt-3.5-turbo-instruct"
                             },
                             {
-                                "label": "text-davinci-002",
-                                "name": "text-davinci-002"
+                                "label": "babbage-002",
+                                "name": "babbage-002"
                             },
                             {
-                                "label": "text-curie-001",
-                                "name": "text-curie-001"
-                            },
-                            {
-                                "label": "text-babbage-001",
-                                "name": "text-babbage-001"
+                                "label": "davinci-002",
+                                "name": "davinci-002"
                             }
                         ],
-                        "default": "text-davinci-003",
+                        "default": "gpt-3.5-turbo-instruct",
                         "optional": true,
                         "id": "openAI_3-input-modelName-options"
                     },
@@ -857,7 +849,7 @@
                 ],
                 "inputAnchors": [],
                 "inputs": {
-                    "modelName": "text-davinci-003",
+                    "modelName": "gpt-3.5-turbo-instruct",
                     "temperature": 0.7,
                     "maxTokens": "",
                     "topP": "",
@@ -1008,7 +1000,7 @@
                 "id": "openAI_4",
                 "label": "OpenAI",
                 "name": "openAI",
-                "version": 1,
+                "version": 2,
                 "type": "OpenAI",
                 "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                 "category": "LLMs",
@@ -1027,23 +1019,19 @@
                         "type": "options",
                         "options": [
                             {
-                                "label": "text-davinci-003",
-                                "name": "text-davinci-003"
+                                "label": "gpt-3.5-turbo-instruct",
+                                "name": "gpt-3.5-turbo-instruct"
                             },
                             {
-                                "label": "text-davinci-002",
-                                "name": "text-davinci-002"
+                                "label": "babbage-002",
+                                "name": "babbage-002"
                             },
                             {
-                                "label": "text-curie-001",
-                                "name": "text-curie-001"
-                            },
-                            {
-                                "label": "text-babbage-001",
-                                "name": "text-babbage-001"
+                                "label": "davinci-002",
+                                "name": "davinci-002"
                             }
                         ],
-                        "default": "text-davinci-003",
+                        "default": "gpt-3.5-turbo-instruct",
                         "optional": true,
                         "id": "openAI_4-input-modelName-options"
                     },
@@ -1122,7 +1110,7 @@
                 ],
                 "inputAnchors": [],
                 "inputs": {
-                    "modelName": "text-davinci-003",
+                    "modelName": "gpt-3.5-turbo-instruct",
                     "temperature": 0.7,
                     "maxTokens": "",
                     "topP": "",
diff --git a/packages/server/marketplaces/chatflows/Prompt Chaining.json b/packages/server/marketplaces/chatflows/Prompt Chaining.json
index e0491cc1c..5bce99057 100644
--- a/packages/server/marketplaces/chatflows/Prompt Chaining.json
+++ b/packages/server/marketplaces/chatflows/Prompt Chaining.json
@@ -296,7 +296,7 @@
                 "id": "openAI_1",
                 "label": "OpenAI",
                 "name": "openAI",
-                "version": 1,
+                "version": 2,
                 "type": "OpenAI",
                 "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                 "category": "LLMs",
@@ -315,23 +315,19 @@
                         "type": "options",
                         "options": [
                             {
-                                "label": "text-davinci-003",
-                                "name": "text-davinci-003"
+                                "label": "gpt-3.5-turbo-instruct",
+                                "name": "gpt-3.5-turbo-instruct"
                             },
                             {
-                                "label": "text-davinci-002",
-                                "name": "text-davinci-002"
+                                "label": "babbage-002",
+                                "name": "babbage-002"
                             },
                             {
-                                "label": "text-curie-001",
-                                "name": "text-curie-001"
-                            },
-                            {
-                                "label": "text-babbage-001",
-                                "name": "text-babbage-001"
+                                "label": "davinci-002",
+                                "name": "davinci-002"
                             }
                         ],
-                        "default": "text-davinci-003",
+                        "default": "gpt-3.5-turbo-instruct",
                         "optional": true,
                         "id": "openAI_1-input-modelName-options"
                     },
@@ -410,7 +406,7 @@
                 ],
                 "inputAnchors": [],
                 "inputs": {
-                    "modelName": "text-davinci-003",
+                    "modelName": "gpt-3.5-turbo-instruct",
                     "temperature": 0.7,
                     "maxTokens": "",
                     "topP": "",
@@ -452,7 +448,7 @@
                 "id": "openAI_2",
                 "label": "OpenAI",
                 "name": "openAI",
-                "version": 1,
+                "version": 2,
                 "type": "OpenAI",
                 "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                 "category": "LLMs",
@@ -471,23 +467,19 @@
                         "type": "options",
                         "options": [
                             {
-                                "label": "text-davinci-003",
-                                "name": "text-davinci-003"
+                                "label": "gpt-3.5-turbo-instruct",
+                                "name": "gpt-3.5-turbo-instruct"
                             },
                             {
-                                "label": "text-davinci-002",
-                                "name": "text-davinci-002"
+                                "label": "babbage-002",
+                                "name": "babbage-002"
                             },
                             {
-                                "label": "text-curie-001",
-                                "name": "text-curie-001"
-                            },
-                            {
-                                "label": "text-babbage-001",
-                                "name": "text-babbage-001"
+                                "label": "davinci-002",
+                                "name": "davinci-002"
                             }
                         ],
-                        "default": "text-davinci-003",
+                        "default": "gpt-3.5-turbo-instruct",
                         "optional": true,
                         "id": "openAI_2-input-modelName-options"
                     },
@@ -565,8 +557,9 @@
                     }
                 ],
                 "inputAnchors": [],
+                "default": "gpt-3.5-turbo-instruct",
                 "inputs": {
-                    "modelName": "text-davinci-003",
+                    "modelName": "gpt-3.5-turbo-instruct",
                     "temperature": 0.7,
                     "maxTokens": "",
                     "topP": "",
diff --git a/packages/server/marketplaces/chatflows/Simple LLM Chain.json b/packages/server/marketplaces/chatflows/Simple LLM Chain.json
index 0fc648c66..21d5ab689 100644
--- a/packages/server/marketplaces/chatflows/Simple LLM Chain.json
+++ b/packages/server/marketplaces/chatflows/Simple LLM Chain.json
@@ -155,7 +155,7 @@
                 "id": "openAI_0",
                 "label": "OpenAI",
                 "name": "openAI",
-                "version": 1,
+                "version": 2,
                 "type": "OpenAI",
                 "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                 "category": "LLMs",
@@ -174,23 +174,19 @@
                         "type": "options",
                         "options": [
                             {
-                                "label": "text-davinci-003",
-                                "name": "text-davinci-003"
+                                "label": "gpt-3.5-turbo-instruct",
+                                "name": "gpt-3.5-turbo-instruct"
                             },
                             {
-                                "label": "text-davinci-002",
-                                "name": "text-davinci-002"
+                                "label": "babbage-002",
+                                "name": "babbage-002"
                             },
                             {
-                                "label": "text-curie-001",
-                                "name": "text-curie-001"
-                            },
-                            {
-                                "label": "text-babbage-001",
-                                "name": "text-babbage-001"
+                                "label": "davinci-002",
+                                "name": "davinci-002"
                             }
                         ],
-                        "default": "text-davinci-003",
+                        "default": "gpt-3.5-turbo-instruct",
                         "optional": true,
                         "id": "openAI_0-input-modelName-options"
                     },
@@ -269,7 +265,7 @@
                 ],
                 "inputAnchors": [],
                 "inputs": {
-                    "modelName": "text-davinci-003",
+                    "modelName": "gpt-3.5-turbo-instruct",
                     "temperature": 0.7,
                     "maxTokens": "",
                     "topP": "",
diff --git a/packages/server/marketplaces/chatflows/Zapier NLA.json b/packages/server/marketplaces/chatflows/Zapier NLA.json
index 60258b466..182b24aee 100644
--- a/packages/server/marketplaces/chatflows/Zapier NLA.json
+++ b/packages/server/marketplaces/chatflows/Zapier NLA.json
@@ -115,7 +115,7 @@
                 "id": "openAI_0",
                 "label": "OpenAI",
                 "name": "openAI",
-                "version": 1,
+                "version": 2,
                 "type": "OpenAI",
                 "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                 "category": "LLMs",
@@ -134,23 +134,19 @@
                         "type": "options",
                         "options": [
                             {
-                                "label": "text-davinci-003",
-                                "name": "text-davinci-003"
+                                "label": "gpt-3.5-turbo-instruct",
+                                "name": "gpt-3.5-turbo-instruct"
                             },
                             {
-                                "label": "text-davinci-002",
-                                "name": "text-davinci-002"
+                                "label": "babbage-002",
+                                "name": "babbage-002"
                             },
                             {
-                                "label": "text-curie-001",
-                                "name": "text-curie-001"
-                            },
-                            {
-                                "label": "text-babbage-001",
-                                "name": "text-babbage-001"
+                                "label": "davinci-002",
+                                "name": "davinci-002"
                             }
                         ],
-                        "default": "text-davinci-003",
+                        "default": "gpt-3.5-turbo-instruct",
                         "optional": true,
                         "id": "openAI_0-input-modelName-options"
                     },
@@ -229,7 +225,7 @@
                 ],
                 "inputAnchors": [],
                 "inputs": {
-                    "modelName": "text-davinci-003",
+                    "modelName": "gpt-3.5-turbo-instruct",
                     "temperature": 0.7,
                     "maxTokens": "",
                     "topP": "",