From dfdeb02b3ad2e23e81cee0843230a4e131116b2f Mon Sep 17 00:00:00 2001
From: Mubashir Shariq <103755591+Mubashirshariq@users.noreply.github.com>
Date: Fri, 5 Jul 2024 15:55:37 +0530
Subject: [PATCH] Feat/added chatBaiduWenxin chat model (#2752)

* added chatBaiduWenxin model

* fix linting

* fixed linting

* added baidu secret key
---
 .../credentials/BaiduApi.credential.ts        | 28 +++++++
 .../ChatBaiduWenxin/ChatBaiduWenxin.ts        | 80 +++++++++++++++++++
 .../ChatBaiduWenxin/baiduwenxin.svg           |  7 ++
 packages/server/src/utils/index.ts            |  3 +-
 4 files changed, 117 insertions(+), 1 deletion(-)
 create mode 100644 packages/components/credentials/BaiduApi.credential.ts
 create mode 100644 packages/components/nodes/chatmodels/ChatBaiduWenxin/ChatBaiduWenxin.ts
 create mode 100644 packages/components/nodes/chatmodels/ChatBaiduWenxin/baiduwenxin.svg

diff --git a/packages/components/credentials/BaiduApi.credential.ts b/packages/components/credentials/BaiduApi.credential.ts
new file mode 100644
index 000000000..f2d8ea201
--- /dev/null
+++ b/packages/components/credentials/BaiduApi.credential.ts
@@ -0,0 +1,28 @@
+import { INodeParams, INodeCredential } from '../src/Interface'
+
+class BaiduApi implements INodeCredential {
+    label: string
+    name: string
+    version: number
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Baidu API'
+        this.name = 'baiduApi'
+        this.version = 1.0
+        this.inputs = [
+            {
+                label: 'Baidu Api Key',
+                name: 'baiduApiKey',
+                type: 'password'
+            },
+            {
+                label: 'Baidu Secret Key',
+                name: 'baiduSecretKey',
+                type: 'password'
+            }
+        ]
+    }
+}
+
+module.exports = { credClass: BaiduApi }
diff --git a/packages/components/nodes/chatmodels/ChatBaiduWenxin/ChatBaiduWenxin.ts b/packages/components/nodes/chatmodels/ChatBaiduWenxin/ChatBaiduWenxin.ts
new file mode 100644
index 000000000..265e8baf4
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatBaiduWenxin/ChatBaiduWenxin.ts
@@ -0,0 +1,80 @@
+import { BaseCache } from '@langchain/core/caches'
+import { 
ChatBaiduWenxin } from '@langchain/community/chat_models/baiduwenxin'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+
+class ChatBaiduWenxin_ChatModels implements INode {
+    label: string
+    name: string
+    version: number
+    type: string
+    icon: string
+    category: string
+    description: string
+    baseClasses: string[]
+    credential: INodeParams
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'ChatBaiduWenxin'
+        this.name = 'chatBaiduWenxin'
+        this.version = 1.0
+        this.type = 'ChatBaiduWenxin'
+        this.icon = 'baiduwenxin.svg'
+        this.category = 'Chat Models'
+        this.description = 'Wrapper around BaiduWenxin Chat Endpoints'
+        this.baseClasses = [this.type, ...getBaseClasses(ChatBaiduWenxin)]
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['baiduApi']
+        }
+        this.inputs = [
+            {
+                label: 'Cache',
+                name: 'cache',
+                type: 'BaseCache',
+                optional: true
+            },
+            {
+                label: 'Model',
+                name: 'modelName',
+                type: 'string',
+                placeholder: 'ERNIE-Bot-turbo'
+            },
+            {
+                label: 'Temperature',
+                name: 'temperature',
+                type: 'number',
+                step: 0.1,
+                default: 0.9,
+                optional: true
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const cache = nodeData.inputs?.cache as BaseCache
+        const temperature = nodeData.inputs?.temperature as string
+        const modelName = nodeData.inputs?.modelName as string
+
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const baiduApiKey = getCredentialParam('baiduApiKey', credentialData, nodeData)
+        const baiduSecretKey = getCredentialParam('baiduSecretKey', credentialData, nodeData)
+
+        const obj: Partial<ChatBaiduWenxin> = {
+            streaming: true,
+            baiduApiKey,
+            baiduSecretKey,
+            modelName,
+            temperature: temperature ? 
parseFloat(temperature) : undefined
+        }
+        if (cache) obj.cache = cache
+
+        const model = new ChatBaiduWenxin(obj)
+        return model
+    }
+}
+
+module.exports = { nodeClass: ChatBaiduWenxin_ChatModels }
diff --git a/packages/components/nodes/chatmodels/ChatBaiduWenxin/baiduwenxin.svg b/packages/components/nodes/chatmodels/ChatBaiduWenxin/baiduwenxin.svg
new file mode 100644
index 000000000..afe2bc690
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatBaiduWenxin/baiduwenxin.svg
@@ -0,0 +1,7 @@
+
+
\ No newline at end of file
diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts
index c350af852..849c22023 100644
--- a/packages/server/src/utils/index.ts
+++ b/packages/server/src/utils/index.ts
@@ -1096,7 +1096,8 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod
         'chatGoogleGenerativeAI',
         'chatTogetherAI',
         'chatTogetherAI_LlamaIndex',
-        'chatFireworks'
+        'chatFireworks',
+        'chatBaiduWenxin'
     ],
     LLMs: ['azureOpenAI', 'openAI', 'ollama']
 }