Support custom base URL for ChatGoogleGenerativeAI (#4347)

feat: Support custom base URL for ChatGoogleGenerativeAI
This commit is contained in:
Hans 2025-04-27 12:15:17 +08:00 committed by GitHub
parent ac0450523a
commit fc6eea7653
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 29 additions and 12 deletions

View File

@@ -163,6 +163,14 @@ class GoogleGenerativeAI_ChatModels implements INode {
optional: true,
additionalParams: true
},
{
label: 'Base URL',
name: 'baseUrl',
type: 'string',
description: 'Base URL for the API. Leave empty to use the default.',
optional: true,
additionalParams: true
},
{
label: 'Allow Image Uploads',
name: 'allowImageUploads',
@@ -197,6 +205,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
const cache = nodeData.inputs?.cache as BaseCache
const contextCache = nodeData.inputs?.contextCache as FlowiseGoogleAICacheManager
const streaming = nodeData.inputs?.streaming as boolean
const baseUrl = nodeData.inputs?.baseUrl as string | undefined
const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
@@ -211,6 +220,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
if (topK) obj.topK = parseFloat(topK)
if (cache) obj.cache = cache
if (temperature) obj.temperature = parseFloat(temperature)
if (baseUrl) obj.baseUrl = baseUrl
// Safety Settings
let harmCategories: string[] = convertMultiOptionsToStringArray(harmCategory)

View File

@@ -81,6 +81,8 @@ class LangchainChatGoogleGenerativeAI
apiKey?: string
baseUrl?: string
streaming = false
streamUsage = true
@@ -151,7 +153,8 @@ class LangchainChatGoogleGenerativeAI
}
async getClient(prompt?: Content[], tools?: Tool[]) {
this.client = new GenerativeAI(this.apiKey ?? '').getGenerativeModel({
this.client = new GenerativeAI(this.apiKey ?? '').getGenerativeModel(
{
model: this.modelName,
tools,
safetySettings: this.safetySettings as SafetySetting[],
@@ -163,7 +166,11 @@ class LangchainChatGoogleGenerativeAI
topP: this.topP,
topK: this.topK
}
})
},
{
baseUrl: this.baseUrl
}
)
if (this.contextCache) {
const cachedContent = await this.contextCache.lookup({
contents: prompt ? [{ ...prompt[0], parts: prompt[0].parts.slice(0, 1) }] : [],