diff --git a/packages/components/nodes/chatmodels/ChatOpenAIFineTuned/ChatOpenAIFineTuned.ts b/packages/components/nodes/chatmodels/ChatOpenAIFineTuned/ChatOpenAIFineTuned.ts
new file mode 100644
index 000000000..bfe3ba7a9
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatOpenAIFineTuned/ChatOpenAIFineTuned.ts
@@ -0,0 +1,149 @@
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+import { ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
+
+class ChatOpenAIFineTuned_ChatModels implements INode {
+    label: string
+    name: string
+    version: number
+    type: string
+    icon: string
+    category: string
+    description: string
+    baseClasses: string[]
+    credential: INodeParams
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'ChatOpenAI Fine-Tuned'
+        this.name = 'chatOpenAIFineTuned'
+        this.version = 1.0
+        this.type = 'ChatOpenAI-FineTuned'
+        this.icon = 'openai.png'
+        this.category = 'Chat Models'
+        this.description = 'Wrapper around fine-tuned OpenAI LLM that use the Chat endpoint'
+        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['openAIApi']
+        }
+        this.inputs = [
+            {
+                label: 'Model Name',
+                name: 'modelName',
+                type: 'string',
+                placeholder: 'ft:gpt-3.5-turbo:my-org:custom_suffix:id'
+            },
+            {
+                label: 'Temperature',
+                name: 'temperature',
+                type: 'number',
+                step: 0.1,
+                default: 0.9,
+                optional: true
+            },
+            {
+                label: 'Max Tokens',
+                name: 'maxTokens',
+                type: 'number',
+                step: 1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Top Probability',
+                name: 'topP',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Frequency Penalty',
+                name: 'frequencyPenalty',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Presence Penalty',
+                name: 'presencePenalty',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Timeout',
+                name: 'timeout',
+                type: 'number',
+                step: 1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'BasePath',
+                name: 'basepath',
+                type: 'string',
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'BaseOptions',
+                name: 'baseOptions',
+                type: 'json',
+                optional: true,
+                additionalParams: true
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const temperature = nodeData.inputs?.temperature as string
+        const modelName = nodeData.inputs?.modelName as string
+        const maxTokens = nodeData.inputs?.maxTokens as string
+        const topP = nodeData.inputs?.topP as string
+        const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
+        const presencePenalty = nodeData.inputs?.presencePenalty as string
+        const timeout = nodeData.inputs?.timeout as string
+        const streaming = nodeData.inputs?.streaming as boolean
+        const basePath = nodeData.inputs?.basepath as string
+        const baseOptions = nodeData.inputs?.baseOptions
+
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData)
+
+        const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = {
+            temperature: parseFloat(temperature),
+            modelName,
+            openAIApiKey,
+            streaming: streaming ?? true
+        }
+
+        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
+        if (topP) obj.topP = parseFloat(topP)
+        if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
+        if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
+        if (timeout) obj.timeout = parseInt(timeout, 10)
+
+        let parsedBaseOptions: any | undefined = undefined
+
+        if (baseOptions) {
+            try {
+                parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
+            } catch (exception) {
+                throw new Error("Invalid JSON in the ChatOpenAI's BaseOptions: " + exception)
+            }
+        }
+        const model = new ChatOpenAI(obj, {
+            basePath,
+            baseOptions: parsedBaseOptions
+        })
+        return model
+    }
+}
+
+module.exports = { nodeClass: ChatOpenAIFineTuned_ChatModels }
diff --git a/packages/components/nodes/chatmodels/ChatOpenAIFineTuned/openai.png b/packages/components/nodes/chatmodels/ChatOpenAIFineTuned/openai.png
new file mode 100644
index 000000000..de08a05b2
Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatOpenAIFineTuned/openai.png differ
diff --git a/packages/components/nodes/llms/OpenAI/OpenAI.ts b/packages/components/nodes/llms/OpenAI/OpenAI.ts
index 4e35d659f..951d1a706 100644
--- a/packages/components/nodes/llms/OpenAI/OpenAI.ts
+++ b/packages/components/nodes/llms/OpenAI/OpenAI.ts
@@ -125,6 +125,13 @@ class OpenAI_LLMs implements INode {
                 type: 'string',
                 optional: true,
                 additionalParams: true
+            },
+            {
+                label: 'BaseOptions',
+                name: 'baseOptions',
+                type: 'json',
+                optional: true,
+                additionalParams: true
             }
         ]
     }
@@ -141,6 +148,7 @@ class OpenAI_LLMs implements INode {
         const bestOf = nodeData.inputs?.bestOf as string
         const streaming = nodeData.inputs?.streaming as boolean
         const basePath = nodeData.inputs?.basepath as string
+        const baseOptions = nodeData.inputs?.baseOptions
 
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData)
@@ -160,8 +168,19 @@ class OpenAI_LLMs implements INode {
         if (batchSize) obj.batchSize = parseInt(batchSize, 10)
         if (bestOf) obj.bestOf = parseInt(bestOf, 10)
 
+        let parsedBaseOptions: any | undefined = undefined
+
+        if (baseOptions) {
+            try {
+                parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
+            } catch (exception) {
+                throw new Error("Invalid JSON in the OpenAI's BaseOptions: " + exception)
+            }
+        }
+
         const model = new OpenAI(obj, {
-            basePath
+            basePath,
+            baseOptions: parsedBaseOptions
         })
         return model
     }