diff --git a/packages/components/credentials/SambanovaApi.credential.ts b/packages/components/credentials/SambanovaApi.credential.ts
new file mode 100644
index 000000000..60a7e13d8
--- /dev/null
+++ b/packages/components/credentials/SambanovaApi.credential.ts
@@ -0,0 +1,23 @@
+import { INodeParams, INodeCredential } from '../src/Interface'
+
+class SambanovaApi implements INodeCredential {
+    label: string
+    name: string
+    version: number
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Sambanova API'
+        this.name = 'sambanovaApi'
+        this.version = 1.0
+        this.inputs = [
+            {
+                label: 'Sambanova Api Key',
+                name: 'sambanovaApiKey',
+                type: 'password'
+            }
+        ]
+    }
+}
+
+module.exports = { credClass: SambanovaApi }
diff --git a/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts b/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts
new file mode 100644
index 000000000..a62ebfb30
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts
@@ -0,0 +1,123 @@
+import { BaseCache } from '@langchain/core/caches'
+import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+
+class ChatSambanova_ChatModels implements INode {
+    label: string
+    name: string
+    version: number
+    type: string
+    icon: string
+    category: string
+    description: string
+    baseClasses: string[]
+    credential: INodeParams
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'ChatSambanova'
+        this.name = 'chatSambanova'
+        this.version = 1.0
+        this.type = 'ChatSambanova'
+        this.icon = 'sambanova.png'
+        this.category = 'Chat Models'
+        this.description = 'Wrapper around Sambanova Chat Endpoints'
+        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['sambanovaApi']
+        }
+        this.inputs = [
+            {
+                label: 'Cache',
+                name: 'cache',
+                type: 'BaseCache',
+                optional: true
+            },
+            {
+                label: 'Model',
+                name: 'modelName',
+                type: 'string',
+                default: 'Meta-Llama-3.3-70B-Instruct',
+                placeholder: 'Meta-Llama-3.3-70B-Instruct'
+            },
+            {
+                label: 'Temperature',
+                name: 'temperature',
+                type: 'number',
+                step: 0.1,
+                default: 0.9,
+                optional: true
+            },
+            {
+                label: 'Streaming',
+                name: 'streaming',
+                type: 'boolean',
+                default: true,
+                optional: true
+            },
+            {
+                label: 'BasePath',
+                name: 'basepath',
+                type: 'string',
+                optional: true,
+                default: 'https://api.sambanova.ai/v1', // fixed scheme typo: was 'htps://'
+                additionalParams: true
+            },
+            {
+                label: 'BaseOptions',
+                name: 'baseOptions',
+                type: 'json',
+                optional: true,
+                additionalParams: true
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const cache = nodeData.inputs?.cache as BaseCache
+        const temperature = nodeData.inputs?.temperature as string
+        const modelName = nodeData.inputs?.modelName as string
+        const streaming = nodeData.inputs?.streaming as boolean
+        const basePath = nodeData.inputs?.basepath as string
+        const baseOptions = nodeData.inputs?.baseOptions
+
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const sambanovaApiKey = getCredentialParam('sambanovaApiKey', credentialData, nodeData)
+
+        const obj: ChatOpenAIFields = {
+            temperature: temperature ? parseFloat(temperature) : undefined,
+            model: modelName,
+            apiKey: sambanovaApiKey,
+            openAIApiKey: sambanovaApiKey,
+            streaming: streaming ?? true
+        }
+
+        if (cache) obj.cache = cache
+
+        let parsedBaseOptions: any | undefined = undefined
+
+        if (baseOptions) {
+            try {
+                parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
+            } catch (exception) {
+                throw new Error("Invalid JSON in the ChatSambanova's BaseOptions: " + exception)
+            }
+        }
+
+        if (basePath || parsedBaseOptions) {
+            obj.configuration = {
+                baseURL: basePath,
+                defaultHeaders: parsedBaseOptions
+            }
+        }
+
+        const model = new ChatOpenAI(obj)
+        return model
+    }
+}
+
+module.exports = { nodeClass: ChatSambanova_ChatModels }
diff --git a/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png b/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png
new file mode 100644
index 000000000..8bc16c5d5
Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png differ
diff --git a/packages/components/nodes/llms/SambaNova/Sambanova.ts b/packages/components/nodes/llms/SambaNova/Sambanova.ts
new file mode 100644
index 000000000..4cb76aefa
--- /dev/null
+++ b/packages/components/nodes/llms/SambaNova/Sambanova.ts
@@ -0,0 +1,71 @@
+import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
+import { OpenAI } from '@langchain/openai'
+import { BaseCache } from '@langchain/core/caches'
+
+class Sambanova_LLMs implements INode {
+    label: string
+    name: string
+    version: number
+    type: string
+    icon: string
+    category: string
+    description: string
+    baseClasses: string[]
+    credential: INodeParams
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Sambanova'
+        this.name = 'sambanova'
+        this.version = 1.0
+        this.type = 'Sambanova'
+        this.icon = 'sambanova.png'
+        this.category = 'LLMs'
+        this.description = 'Wrapper around Sambanova API for large language models'
+        this.baseClasses = [this.type, ...getBaseClasses(OpenAI)]
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['sambanovaApi']
+        }
+        this.inputs = [
+            {
+                label: 'Cache',
+                name: 'cache',
+                type: 'BaseCache',
+                optional: true
+            },
+            {
+                label: 'Model Name',
+                name: 'modelName',
+                type: 'string',
+                default: 'Meta-Llama-3.3-70B-Instruct',
+                description: 'For more details see https://docs.sambanova.ai/cloud/docs/get-started/supported-models',
+                optional: true
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const cache = nodeData.inputs?.cache as BaseCache
+        const modelName = nodeData.inputs?.modelName as string
+
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const sambanovaKey = getCredentialParam('sambanovaApiKey', credentialData, nodeData)
+
+        const obj: any = {
+            model: modelName,
+            configuration: {
+                baseURL: 'https://api.sambanova.ai/v1',
+                apiKey: sambanovaKey
+            }
+        }
+        if (cache) obj.cache = cache
+
+        const sambanova = new OpenAI(obj)
+        return sambanova
+    }
+}
+
+module.exports = { nodeClass: Sambanova_LLMs }
diff --git a/packages/components/nodes/llms/SambaNova/sambanova.png b/packages/components/nodes/llms/SambaNova/sambanova.png
new file mode 100644
index 000000000..8bc16c5d5
Binary files /dev/null and b/packages/components/nodes/llms/SambaNova/sambanova.png differ
diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts
index d99c0b546..8ac28593b 100644
--- a/packages/server/src/utils/index.ts
+++ b/packages/server/src/utils/index.ts
@@ -1490,6 +1490,7 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod
                 'chatTogetherAI',
                 'chatTogetherAI_LlamaIndex',
                 'chatFireworks',
+                'chatSambanova',
                 'chatBaiduWenxin'
             ],
             LLMs: ['azureOpenAI', 'openAI', 'ollama']