Vertex Gemini Custom Model Changes (#3737)
parent e8903a5719
commit 2df129f91b
@@ -23,6 +23,10 @@ class ChatVertexAI extends LcChatVertexAI implements IVisionChatModal {
     constructor(id: string, fields?: ChatVertexAIInput) {
         // @ts-ignore
+        if (fields?.model) {
+            fields.modelName = fields.model
+            delete fields.model
+        }
         super(fields ?? {})
         this.id = id
         this.configuredModel = fields?.modelName || ''
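The wrapper constructor now accepts a LangChain-style `model` field and folds it into `modelName` before calling the parent constructor. A minimal sketch of that normalization, using a simplified stand-in for `ChatVertexAIInput` (the `VertexFieldsSketch` interface and the helper name below are illustrative, not part of the commit):

// Illustrative only: simplified stand-in for ChatVertexAIInput.
interface VertexFieldsSketch {
    model?: string
    modelName?: string
    temperature?: number
}

// Move `model` onto `modelName` so the rest of the wrapper reads a single field.
function normalizeModelField(fields?: VertexFieldsSketch): VertexFieldsSketch {
    const normalized = { ...(fields ?? {}) }
    if (normalized.model) {
        normalized.modelName = normalized.model
        delete normalized.model
    }
    return normalized
}

// normalizeModelField({ model: 'gemini-1.5-pro' }) -> { modelName: 'gemini-1.5-pro' }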
@@ -61,7 +65,7 @@ class GoogleVertexAI_ChatModels implements INode {
     constructor() {
         this.label = 'ChatGoogleVertexAI'
         this.name = 'chatGoogleVertexAI'
-        this.version = 5.0
+        this.version = 5.1
         this.type = 'ChatGoogleVertexAI'
         this.icon = 'GoogleVertex.svg'
         this.category = 'Chat Models'
@@ -89,6 +93,14 @@ class GoogleVertexAI_ChatModels implements INode {
                 type: 'asyncOptions',
                 loadMethod: 'listModels'
             },
+            {
+                label: 'Custom Model Name',
+                name: 'customModelName',
+                type: 'string',
+                placeholder: 'gemini-1.5-pro-exp-0801',
+                description: 'Custom model name to use. If provided, it will override the model selected',
+                additionalParams: true
+            },
             {
                 label: 'Temperature',
                 name: 'temperature',
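The new `customModelName` input is a free-text field under additional parameters; when set, it is meant to take precedence over the `modelName` dropdown populated via `listModels`. A sketch of that precedence rule, with a hypothetical helper name:

// Hypothetical helper showing the override rule applied later in init():
// a non-empty custom name wins over the dropdown selection.
function resolveModelName(modelName?: string, customModelName?: string): string | undefined {
    return customModelName || modelName
}

// resolveModelName('gemini-1.5-pro', 'gemini-1.5-pro-exp-0801') -> 'gemini-1.5-pro-exp-0801'
// resolveModelName('gemini-1.5-pro', undefined) -> 'gemini-1.5-pro'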
@@ -163,7 +175,6 @@ class GoogleVertexAI_ChatModels implements INode {
             throw new Error(
                 'Error: More than one component has been inputted. Please use only one of the following: Google Application Credential File Path or Google Credential JSON Object'
             )
 
         if (googleApplicationCredentialFilePath && !googleApplicationCredential)
             authOptions.keyFile = googleApplicationCredentialFilePath
         else if (!googleApplicationCredentialFilePath && googleApplicationCredential)
@@ -174,6 +185,7 @@ class GoogleVertexAI_ChatModels implements INode {
         const temperature = nodeData.inputs?.temperature as string
         const modelName = nodeData.inputs?.modelName as string
+        const customModelName = nodeData.inputs?.customModelName as string
         const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string
         const topP = nodeData.inputs?.topP as string
         const cache = nodeData.inputs?.cache as BaseCache
@@ -190,11 +202,10 @@ class GoogleVertexAI_ChatModels implements INode {
         const obj: ChatVertexAIInput = {
             temperature: parseFloat(temperature),
-            model: modelName,
+            modelName: customModelName || modelName,
             streaming: streaming ?? true
         }
         if (Object.keys(authOptions).length !== 0) obj.authOptions = authOptions
 
         if (maxOutputTokens) obj.maxOutputTokens = parseInt(maxOutputTokens, 10)
         if (topP) obj.topP = parseFloat(topP)
         if (cache) obj.cache = cache
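Putting the pieces together, init() builds the ChatVertexAIInput with the resolved model name and only attaches the optional numeric settings the user actually supplied. A simplified sketch of that assembly (field names follow the diff; the helper itself is illustrative and omits auth options, cache and streaming callbacks):

// Illustrative assembly of constructor fields from raw string inputs.
function buildVertexFieldsSketch(inputs: {
    temperature: string
    modelName: string
    customModelName?: string
    maxOutputTokens?: string
    topP?: string
    streaming?: boolean
}) {
    const fields: Record<string, unknown> = {
        temperature: parseFloat(inputs.temperature),
        modelName: inputs.customModelName || inputs.modelName,
        streaming: inputs.streaming ?? true
    }
    if (inputs.maxOutputTokens) fields.maxOutputTokens = parseInt(inputs.maxOutputTokens, 10)
    if (inputs.topP) fields.topP = parseFloat(inputs.topP)
    return fields
}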