diff --git a/packages/components/nodes/chatmodels/ChatOpenRouter/ChatOpenRouter.ts b/packages/components/nodes/chatmodels/ChatOpenRouter/ChatOpenRouter.ts
index 8fd42920f..4defd19ed 100644
--- a/packages/components/nodes/chatmodels/ChatOpenRouter/ChatOpenRouter.ts
+++ b/packages/components/nodes/chatmodels/ChatOpenRouter/ChatOpenRouter.ts
@@ -1,7 +1,8 @@
-import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
 import { BaseCache } from '@langchain/core/caches'
-import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+import { ChatOpenRouter } from './FlowiseChatOpenRouter'
 
 class ChatOpenRouter_ChatModels implements INode {
     label: string
@@ -23,7 +24,7 @@ class ChatOpenRouter_ChatModels implements INode {
         this.icon = 'openRouter.svg'
         this.category = 'Chat Models'
         this.description = 'Wrapper around Open Router Inference API'
-        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
+        this.baseClasses = [this.type, ...getBaseClasses(LangchainChatOpenAI)]
         this.credential = {
             label: 'Connect Credential',
             name: 'credential',
@@ -114,6 +115,40 @@ class ChatOpenRouter_ChatModels implements INode {
                 type: 'json',
                 optional: true,
                 additionalParams: true
+            },
+            {
+                label: 'Allow Image Uploads',
+                name: 'allowImageUploads',
+                type: 'boolean',
+                description:
+                    'Allow image input. Refer to the docs for more details.',
+                default: false,
+                optional: true
+            },
+            {
+                label: 'Image Resolution',
+                description: 'This parameter controls the resolution in which the model views the image.',
+                name: 'imageResolution',
+                type: 'options',
+                options: [
+                    {
+                        label: 'Low',
+                        name: 'low'
+                    },
+                    {
+                        label: 'High',
+                        name: 'high'
+                    },
+                    {
+                        label: 'Auto',
+                        name: 'auto'
+                    }
+                ],
+                default: 'low',
+                optional: false,
+                show: {
+                    allowImageUploads: true
+                }
             }
         ]
     }
@@ -130,6 +165,8 @@ class ChatOpenRouter_ChatModels implements INode {
         const basePath = (nodeData.inputs?.basepath as string) || 'https://openrouter.ai/api/v1'
         const baseOptions = nodeData.inputs?.baseOptions
         const cache = nodeData.inputs?.cache as BaseCache
+        const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
+        const imageResolution = nodeData.inputs?.imageResolution as string
 
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const openRouterApiKey = getCredentialParam('openRouterApiKey', credentialData, nodeData)
@@ -155,7 +192,7 @@ class ChatOpenRouter_ChatModels implements INode {
             try {
                 parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
             } catch (exception) {
-                throw new Error("Invalid JSON in the ChatCerebras's BaseOptions: " + exception)
+                throw new Error("Invalid JSON in the ChatOpenRouter's BaseOptions: " + exception)
             }
         }
 
@@ -166,7 +203,15 @@ class ChatOpenRouter_ChatModels implements INode {
             }
         }
 
-        const model = new ChatOpenAI(obj)
+        const multiModalOption: IMultiModalOption = {
+            image: {
+                allowImageUploads: allowImageUploads ?? false,
+                imageResolution
+            }
+        }
+
+        const model = new ChatOpenRouter(nodeData.id, obj)
+        model.setMultiModalOption(multiModalOption)
         return model
     }
 }
diff --git a/packages/components/nodes/chatmodels/ChatOpenRouter/FlowiseChatOpenRouter.ts b/packages/components/nodes/chatmodels/ChatOpenRouter/FlowiseChatOpenRouter.ts
new file mode 100644
index 000000000..bca0c5d16
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatOpenRouter/FlowiseChatOpenRouter.ts
@@ -0,0 +1,29 @@
+import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { IMultiModalOption, IVisionChatModal } from '../../../src'
+
+export class ChatOpenRouter extends LangchainChatOpenAI implements IVisionChatModal {
+    configuredModel: string
+    configuredMaxToken?: number
+    multiModalOption: IMultiModalOption
+    id: string
+
+    constructor(id: string, fields?: ChatOpenAIFields) {
+        super(fields)
+        this.id = id
+        this.configuredModel = fields?.modelName ?? ''
+        this.configuredMaxToken = fields?.maxTokens
+    }
+
+    revertToOriginalModel(): void {
+        this.model = this.configuredModel
+        this.maxTokens = this.configuredMaxToken
+    }
+
+    setMultiModalOption(multiModalOption: IMultiModalOption): void {
+        this.multiModalOption = multiModalOption
+    }
+
+    setVisionModel(): void {
+        // pass - OpenRouter models don't need model switching
+    }
+}
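
For reviewers, a minimal usage sketch of the new vision-enabled wrapper. It assumes the snippet sits next to FlowiseChatOpenRouter.ts; the node id, the model name 'openai/gpt-4o', and the OPENROUTER_API_KEY environment variable are illustrative placeholders, not values taken from this diff:

```ts
import { ChatOpenRouter } from './FlowiseChatOpenRouter'

// Hypothetical values for illustration only.
const model = new ChatOpenRouter('example-node-id', {
    modelName: 'openai/gpt-4o', // any model id routed through OpenRouter
    openAIApiKey: process.env.OPENROUTER_API_KEY,
    configuration: { baseURL: 'https://openrouter.ai/api/v1' }
})

// Mirrors what the node's init() now does with the new Allow Image Uploads / Image Resolution inputs.
model.setMultiModalOption({
    image: {
        allowImageUploads: true,
        imageResolution: 'low'
    }
})
```

The no-op setVisionModel() matches the in-code comment: an OpenRouter model id already selects the desired (possibly vision-capable) model, so no model switching is needed when images are attached.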