Fireworks AI chat model (#2596)

* fireworks chat model
* add chatFireworks to streamAvailableLLMs
* add model parameter input
* Update ChatFireworks.ts
* fix linting

---------

Co-authored-by: Henry Heng <henryheng@flowiseai.com>
This commit is contained in:
parent 9f9aff34f8
commit 88ee9b09a7
@@ -0,0 +1,23 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class FireworksApi implements INodeCredential {
    label: string
    name: string
    version: number
    inputs: INodeParams[]

    constructor() {
        this.label = 'Fireworks API'
        this.name = 'fireworksApi'
        this.version = 1.0
        this.inputs = [
            {
                label: 'Fireworks Api Key',
                name: 'fireworksApiKey',
                type: 'password'
            }
        ]
    }
}

module.exports = { credClass: FireworksApi }
@@ -0,0 +1,79 @@
import { BaseCache } from '@langchain/core/caches'
import { ChatFireworks } from '@langchain/community/chat_models/fireworks'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'

class ChatFireworks_ChatModels implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatFireworks'
        this.name = 'chatFireworks'
        this.version = 1.0
        this.type = 'ChatFireworks'
        this.icon = 'Fireworks.png'
        this.category = 'Chat Models'
        this.description = 'Wrapper around Fireworks Chat Endpoints'
        this.baseClasses = [this.type, ...getBaseClasses(ChatFireworks)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['fireworksApi']
        }
        this.inputs = [
            {
                label: 'Cache',
                name: 'cache',
                type: 'BaseCache',
                optional: true
            },
            {
                label: 'Model',
                name: 'modelName',
                type: 'string',
                default: 'accounts/fireworks/models/llama-v2-13b-chat',
                placeholder: 'accounts/fireworks/models/llama-v2-13b-chat'
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                step: 0.1,
                default: 0.9,
                optional: true
            }
        ]
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const cache = nodeData.inputs?.cache as BaseCache
        const temperature = nodeData.inputs?.temperature as string
        const modelName = nodeData.inputs?.modelName as string

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const fireworksApiKey = getCredentialParam('fireworksApiKey', credentialData, nodeData)

        const obj: Partial<ChatFireworks> = {
            fireworksApiKey,
            model: modelName,
            modelName,
            temperature: temperature ? parseFloat(temperature) : undefined
        }
        if (cache) obj.cache = cache

        const model = new ChatFireworks(obj)
        return model
    }
}

module.exports = { nodeClass: ChatFireworks_ChatModels }
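As a point of reference (not part of the commit), here is a minimal standalone sketch of the LangChain class this node wraps. The API key, model name, and prompt below are placeholders, and the option names simply mirror what the node's init() passes to the constructor; consult the @langchain/community documentation for the authoritative API.

import { ChatFireworks } from '@langchain/community/chat_models/fireworks'

// Placeholder values; the real node resolves these from Flowise credentials and inputs.
const model = new ChatFireworks({
    fireworksApiKey: process.env.FIREWORKS_API_KEY, // same key the node fetches via getCredentialParam
    modelName: 'accounts/fireworks/models/llama-v2-13b-chat', // the node's default model
    temperature: 0.9 // the node's default temperature
})

async function main() {
    // Chat models accept a plain string (or an array of messages) via invoke().
    const response = await model.invoke('Say hello from Fireworks')
    console.log(response.content)
}

main()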
Binary file not shown. Size: 6.6 KiB
@@ -1095,7 +1095,8 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod
         'chatCohere',
         'chatGoogleGenerativeAI',
         'chatTogetherAI',
-        'chatTogetherAI_LlamaIndex'
+        'chatTogetherAI_LlamaIndex',
+        'chatFireworks'
     ],
     LLMs: ['azureOpenAI', 'openAI', 'ollama']
 }
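The hunk above only adds entries to the streaming whitelist; the check that consumes it sits outside this diff. A rough sketch, under the assumption that the ending node's name is simply looked up in the list for its category:

// Assumed shape of the streaming check; the actual logic inside isFlowValidForStream is not shown in this diff.
const streamAvailableLLMs: Record<string, string[]> = {
    'Chat Models': ['chatTogetherAI_LlamaIndex', 'chatFireworks' /* ...other chat models... */],
    LLMs: ['azureOpenAI', 'openAI', 'ollama']
}

const endingNodeName = 'chatFireworks' // e.g. the flow ends on the new ChatFireworks node
const isStreamAvailable =
    streamAvailableLLMs['Chat Models'].includes(endingNodeName) || streamAvailableLLMs.LLMs.includes(endingNodeName)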