Support custom base URL for ChatGoogleGenerativeAI (#4347)

feat: Support custom base URL for ChatGoogleGenerativeAI
This commit is contained in:
Hans 2025-04-27 12:15:17 +08:00 committed by GitHub
parent ac0450523a
commit fc6eea7653
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 29 additions and 12 deletions

View File

@@ -163,6 +163,14 @@ class GoogleGenerativeAI_ChatModels implements INode {
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'Base URL',
+                name: 'baseUrl',
+                type: 'string',
+                description: 'Base URL for the API. Leave empty to use the default.',
+                optional: true,
+                additionalParams: true
+            },
             {
                 label: 'Allow Image Uploads',
                 name: 'allowImageUploads',
@@ -197,6 +205,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
         const cache = nodeData.inputs?.cache as BaseCache
         const contextCache = nodeData.inputs?.contextCache as FlowiseGoogleAICacheManager
         const streaming = nodeData.inputs?.streaming as boolean
+        const baseUrl = nodeData.inputs?.baseUrl as string | undefined
         const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
@@ -211,6 +220,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
         if (topK) obj.topK = parseFloat(topK)
         if (cache) obj.cache = cache
         if (temperature) obj.temperature = parseFloat(temperature)
+        if (baseUrl) obj.baseUrl = baseUrl

         // Safety Settings
         let harmCategories: string[] = convertMultiOptionsToStringArray(harmCategory)

View File

@@ -81,6 +81,8 @@ class LangchainChatGoogleGenerativeAI
     apiKey?: string

+    baseUrl?: string
+
     streaming = false

     streamUsage = true
@@ -151,19 +153,24 @@ class LangchainChatGoogleGenerativeAI
     }

     async getClient(prompt?: Content[], tools?: Tool[]) {
-        this.client = new GenerativeAI(this.apiKey ?? '').getGenerativeModel({
-            model: this.modelName,
-            tools,
-            safetySettings: this.safetySettings as SafetySetting[],
-            generationConfig: {
-                candidateCount: 1,
-                stopSequences: this.stopSequences,
-                maxOutputTokens: this.maxOutputTokens,
-                temperature: this.temperature,
-                topP: this.topP,
-                topK: this.topK
-            }
-        })
+        this.client = new GenerativeAI(this.apiKey ?? '').getGenerativeModel(
+            {
+                model: this.modelName,
+                tools,
+                safetySettings: this.safetySettings as SafetySetting[],
+                generationConfig: {
+                    candidateCount: 1,
+                    stopSequences: this.stopSequences,
+                    maxOutputTokens: this.maxOutputTokens,
+                    temperature: this.temperature,
+                    topP: this.topP,
+                    topK: this.topK
+                }
+            },
+            {
+                baseUrl: this.baseUrl
+            }
+        )
         if (this.contextCache) {
             const cachedContent = await this.contextCache.lookup({
                 contents: prompt ? [{ ...prompt[0], parts: prompt[0].parts.slice(0, 1) }] : [],