Feature: Add SambaNova (#4961)

* add sambanova

* add sambanova credential

* fix samba nova chat node

---------

Co-authored-by: Henry <hzj94@hotmail.com>
Luis Felipe Salazar Ucros · 2025-08-18 09:09:39 -05:00 · committed by GitHub
parent ad0679801a
commit bf1ddc3be5
6 changed files with 218 additions and 0 deletions


@@ -0,0 +1,23 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class SambanovaApi implements INodeCredential {
    label: string
    name: string
    version: number
    inputs: INodeParams[]

    constructor() {
        this.label = 'Sambanova API'
        this.name = 'sambanovaApi'
        this.version = 1.0
        this.inputs = [
            {
                label: 'Sambanova API Key',
                name: 'sambanovaApiKey',
                type: 'password'
            }
        ]
    }
}

module.exports = { credClass: SambanovaApi }
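For a quick sanity check outside Flowise, the key stored by this credential can be exercised directly against SambaNova's endpoint. A minimal sketch: the base URL comes from the node definitions below, while the /models route is an assumption based on the endpoint being OpenAI-compatible.

// Hedged sketch: verify a SambaNova API key by listing models, assuming the
// endpoint is OpenAI-compatible as the nodes below suggest. Requires Node 18+
// for the global fetch.
async function checkSambanovaKey(sambanovaApiKey: string): Promise<boolean> {
    const res = await fetch('https://api.sambanova.ai/v1/models', {
        headers: { Authorization: `Bearer ${sambanovaApiKey}` }
    })
    return res.ok // a 2xx status means the key was accepted
}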


@@ -0,0 +1,123 @@
import { BaseCache } from '@langchain/core/caches'
import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'

class ChatSambanova_ChatModels implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatSambanova'
        this.name = 'chatSambanova'
        this.version = 1.0
        this.type = 'ChatSambanova'
        this.icon = 'sambanova.png'
        this.category = 'Chat Models'
        this.description = 'Wrapper around Sambanova Chat Endpoints'
        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['sambanovaApi']
        }
        this.inputs = [
            {
                label: 'Cache',
                name: 'cache',
                type: 'BaseCache',
                optional: true
            },
            {
                label: 'Model',
                name: 'modelName',
                type: 'string',
                default: 'Meta-Llama-3.3-70B-Instruct',
                placeholder: 'Meta-Llama-3.3-70B-Instruct'
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                step: 0.1,
                default: 0.9,
                optional: true
            },
            {
                label: 'Streaming',
                name: 'streaming',
                type: 'boolean',
                default: true,
                optional: true
            },
            {
                label: 'BasePath',
                name: 'basepath',
                type: 'string',
                optional: true,
                default: 'https://api.sambanova.ai/v1',
                additionalParams: true
            },
            {
                label: 'BaseOptions',
                name: 'baseOptions',
                type: 'json',
                optional: true,
                additionalParams: true
            }
        ]
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const cache = nodeData.inputs?.cache as BaseCache
        const temperature = nodeData.inputs?.temperature as string
        const modelName = nodeData.inputs?.modelName as string
        const streaming = nodeData.inputs?.streaming as boolean
        const basePath = nodeData.inputs?.basepath as string
        const baseOptions = nodeData.inputs?.baseOptions

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const sambanovaApiKey = getCredentialParam('sambanovaApiKey', credentialData, nodeData)

        // SambaNova exposes an OpenAI-compatible API, so the node reuses ChatOpenAI
        // pointed at the SambaNova base path. The key is set under both field names
        // so either resolution path in @langchain/openai picks it up.
        const obj: ChatOpenAIFields = {
            temperature: temperature ? parseFloat(temperature) : undefined,
            model: modelName,
            apiKey: sambanovaApiKey,
            openAIApiKey: sambanovaApiKey,
            streaming: streaming ?? true
        }
        if (cache) obj.cache = cache

        let parsedBaseOptions: any | undefined = undefined
        if (baseOptions) {
            try {
                parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
            } catch (exception) {
                throw new Error('Invalid JSON in ChatSambanova BaseOptions: ' + exception)
            }
        }
        if (basePath || parsedBaseOptions) {
            obj.configuration = {
                baseURL: basePath,
                defaultHeaders: parsedBaseOptions
            }
        }

        return new ChatOpenAI(obj)
    }
}

module.exports = { nodeClass: ChatSambanova_ChatModels }
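Because the node simply points ChatOpenAI at a custom baseURL, its behavior can be reproduced outside Flowise. A minimal sketch using the defaults from the node definition above; SAMBANOVA_API_KEY is a hypothetical environment variable.

import { ChatOpenAI } from '@langchain/openai'

// Standalone equivalent of what init() above assembles, using the node's defaults.
const model = new ChatOpenAI({
    model: 'Meta-Llama-3.3-70B-Instruct',
    temperature: 0.9,
    apiKey: process.env.SAMBANOVA_API_KEY, // hypothetical env var
    streaming: true,
    configuration: { baseURL: 'https://api.sambanova.ai/v1' }
})

const res = await model.invoke('Say hello in one short sentence.')
console.log(res.content)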

Binary file added: sambanova.png icon (12 KiB), not shown.


@@ -0,0 +1,71 @@
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
import { OpenAI } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'

class Sambanova_LLMs implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'Sambanova'
        this.name = 'sambanova'
        this.version = 1.0
        this.type = 'Sambanova'
        this.icon = 'sambanova.png'
        this.category = 'LLMs'
        this.description = 'Wrapper around Sambanova API for large language models'
        this.baseClasses = [this.type, ...getBaseClasses(OpenAI)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['sambanovaApi']
        }
        this.inputs = [
            {
                label: 'Cache',
                name: 'cache',
                type: 'BaseCache',
                optional: true
            },
            {
                label: 'Model Name',
                name: 'modelName',
                type: 'string',
                default: 'Meta-Llama-3.3-70B-Instruct',
                description: 'For more details see https://docs.sambanova.ai/cloud/docs/get-started/supported-models',
                optional: true
            }
        ]
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const cache = nodeData.inputs?.cache as BaseCache
        const modelName = nodeData.inputs?.modelName as string

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const sambanovaKey = getCredentialParam('sambanovaApiKey', credentialData, nodeData)

        const obj: any = {
            model: modelName,
            configuration: {
                baseURL: 'https://api.sambanova.ai/v1',
                apiKey: sambanovaKey
            }
        }
        if (cache) obj.cache = cache

        return new OpenAI(obj)
    }
}

module.exports = { nodeClass: Sambanova_LLMs }
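Note the design difference from the chat node: here the key travels inside configuration.apiKey rather than as a top-level field, and both routes reach the underlying OpenAI client. A standalone sketch under the same assumptions as above; SAMBANOVA_API_KEY is again a hypothetical environment variable.

import { OpenAI } from '@langchain/openai'

// Standalone equivalent of the LLM node's init(), using its default model.
// Mirrors the committed code: the key is passed via configuration.apiKey.
const llm = new OpenAI({
    model: 'Meta-Llama-3.3-70B-Instruct',
    configuration: {
        baseURL: 'https://api.sambanova.ai/v1',
        apiKey: process.env.SAMBANOVA_API_KEY // hypothetical env var
    }
})

console.log(await llm.invoke('The capital of France is'))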

Binary file added: sambanova.png icon (12 KiB), not shown.


@@ -1490,6 +1490,7 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod
            'chatTogetherAI',
            'chatTogetherAI_LlamaIndex',
            'chatFireworks',
            'chatSambanova',
            'chatBaiduWenxin'
        ],
        LLMs: ['azureOpenAI', 'openAI', 'ollama']
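This whitelist is what isFlowValidForStream consults to decide whether a flow's ending node may stream tokens, so adding the node name here is what enables streaming for SambaNova chat flows. An illustrative sketch of the membership check, with the lists abridged; the real helper performs additional checks not shown in this diff.

// Illustrative only: the shape of the gate this whitelist feeds.
const whitelistNodes: Record<string, string[]> = {
    'Chat Models': ['chatTogetherAI', 'chatFireworks', 'chatSambanova' /* added by this commit */],
    LLMs: ['azureOpenAI', 'openAI', 'ollama']
}

function supportsStreaming(category: string, nodeName: string): boolean {
    return whitelistNodes[category]?.includes(nodeName) ?? false
}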