From 2ac951689ff1dd2b5b8f2e3df631ac13fc678354 Mon Sep 17 00:00:00 2001 From: Henry Date: Fri, 28 Apr 2023 00:39:10 +0100 Subject: [PATCH] add params for openai --- .../nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts | 53 +++++++++++++- .../OpenAIEmbedding/OpenAIEmbedding.ts | 36 +++++++++- .../components/nodes/llms/OpenAI/OpenAI.ts | 72 ++++++++++++++++++- 3 files changed, 154 insertions(+), 7 deletions(-) diff --git a/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts b/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts index 66104c9ee..dfb9b5d5c 100644 --- a/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts +++ b/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts @@ -1,3 +1,4 @@ +import { OpenAIChatInput } from 'langchain/llms/openai' import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses } from '../../../src/utils' import { ChatOpenAI } from 'langchain/chat_models/openai' @@ -61,6 +62,41 @@ class ChatOpenAI_ChatModels implements INode { type: 'number', default: 0.9, optional: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Frequency Penalty', + name: 'frequencyPenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Presence Penalty', + name: 'presencePenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true } ] } @@ -69,12 +105,25 @@ class ChatOpenAI_ChatModels implements INode { const temperature = nodeData.inputs?.temperature as string const modelName = nodeData.inputs?.modelName as string const openAIApiKey = nodeData.inputs?.openAIApiKey as string + const maxTokens = nodeData.inputs?.maxTokens as string + const topP = 
nodeData.inputs?.topP as string + const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string + const presencePenalty = nodeData.inputs?.presencePenalty as string + const timeout = nodeData.inputs?.timeout as string - const model = new ChatOpenAI({ + const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = { temperature: parseInt(temperature, 10), modelName, openAIApiKey - }) + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (topP) obj.topP = parseInt(topP, 10) + if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10) + if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + + const model = new ChatOpenAI(obj) return model } } diff --git a/packages/components/nodes/embeddings/OpenAIEmbedding/OpenAIEmbedding.ts b/packages/components/nodes/embeddings/OpenAIEmbedding/OpenAIEmbedding.ts index f361be63f..3ccfab820 100644 --- a/packages/components/nodes/embeddings/OpenAIEmbedding/OpenAIEmbedding.ts +++ b/packages/components/nodes/embeddings/OpenAIEmbedding/OpenAIEmbedding.ts @@ -1,6 +1,6 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses } from '../../../src/utils' -import { OpenAIEmbeddings } from 'langchain/embeddings/openai' +import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai' class OpenAIEmbedding_Embeddings implements INode { label: string @@ -25,14 +25,46 @@ class OpenAIEmbedding_Embeddings implements INode { label: 'OpenAI Api Key', name: 'openAIApiKey', type: 'password' + }, + { + label: 'Strip New Lines', + name: 'stripNewLines', + type: 'boolean', + optional: true, + additionalParams: true + }, + { + label: 'Batch Size', + name: 'batchSize', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true } ] } async init(nodeData: INodeData): Promise<any> { const 
openAIApiKey = nodeData.inputs?.openAIApiKey as string + const stripNewLines = nodeData.inputs?.stripNewLines as boolean + const batchSize = nodeData.inputs?.batchSize as string + const timeout = nodeData.inputs?.timeout as string - const model = new OpenAIEmbeddings({ openAIApiKey }) + const obj: Partial<OpenAIEmbeddingsParams> & { openAIApiKey?: string } = { + openAIApiKey + } + + if (stripNewLines) obj.stripNewLines = stripNewLines + if (batchSize) obj.batchSize = parseInt(batchSize, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + + const model = new OpenAIEmbeddings(obj) return model } } diff --git a/packages/components/nodes/llms/OpenAI/OpenAI.ts b/packages/components/nodes/llms/OpenAI/OpenAI.ts index 7ec179ed4..af44965e3 100644 --- a/packages/components/nodes/llms/OpenAI/OpenAI.ts +++ b/packages/components/nodes/llms/OpenAI/OpenAI.ts @@ -1,6 +1,6 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses } from '../../../src/utils' -import { OpenAI } from 'langchain/llms/openai' +import { OpenAI, OpenAIInput } from 'langchain/llms/openai' class OpenAI_LLMs implements INode { label: string @@ -57,6 +57,55 @@ class OpenAI_LLMs implements INode { type: 'number', default: 0.7, optional: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Best Of', + name: 'bestOf', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Frequency Penalty', + name: 'frequencyPenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Presence Penalty', + name: 'presencePenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Batch Size', + name: 'batchSize', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 
'number', + optional: true, + additionalParams: true } ] } @@ -65,12 +114,29 @@ const temperature = nodeData.inputs?.temperature as string const modelName = nodeData.inputs?.modelName as string const openAIApiKey = nodeData.inputs?.openAIApiKey as string + const maxTokens = nodeData.inputs?.maxTokens as string + const topP = nodeData.inputs?.topP as string + const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string + const presencePenalty = nodeData.inputs?.presencePenalty as string + const timeout = nodeData.inputs?.timeout as string + const batchSize = nodeData.inputs?.batchSize as string + const bestOf = nodeData.inputs?.bestOf as string - const model = new OpenAI({ + const obj: Partial<OpenAIInput> & { openAIApiKey?: string } = { temperature: parseInt(temperature, 10), modelName, openAIApiKey - }) + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (topP) obj.topP = parseInt(topP, 10) + if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10) + if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + if (batchSize) obj.batchSize = parseInt(batchSize, 10) + if (bestOf) obj.bestOf = parseInt(bestOf, 10) + + const model = new OpenAI(obj) return model } }