support image uploads for grok 4 (ChatXAI)

Henry 2025-07-14 15:04:47 +01:00
parent e3eeb5d8a8
commit 289b36ddc1
2 changed files with 52 additions and 4 deletions

View File: ChatXAI.ts

@@ -1,7 +1,8 @@
 import { BaseCache } from '@langchain/core/caches'
-import { ChatXAI, ChatXAIInput } from '@langchain/xai'
-import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { ChatXAIInput } from '@langchain/xai'
+import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+import { ChatXAI } from './FlowiseChatXAI'
 
 class ChatXAI_ChatModels implements INode {
     label: string
@@ -18,7 +19,7 @@ class ChatXAI_ChatModels implements INode {
     constructor() {
         this.label = 'ChatXAI'
         this.name = 'chatXAI'
-        this.version = 1.0
+        this.version = 2.0
         this.type = 'ChatXAI'
        this.icon = 'xai.png'
         this.category = 'Chat Models'
@@ -74,6 +75,15 @@ class ChatXAI_ChatModels implements INode {
                 step: 1,
                 optional: true,
                 additionalParams: true
+            },
+            {
+                label: 'Allow Image Uploads',
+                name: 'allowImageUploads',
+                type: 'boolean',
+                description:
+                    'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
+                default: false,
+                optional: true
             }
         ]
     }
@@ -84,6 +94,7 @@ class ChatXAI_ChatModels implements INode {
         const modelName = nodeData.inputs?.modelName as string
         const maxTokens = nodeData.inputs?.maxTokens as string
         const streaming = nodeData.inputs?.streaming as boolean
+        const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
 
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const xaiApiKey = getCredentialParam('xaiApiKey', credentialData, nodeData)
@@ -97,7 +108,15 @@ class ChatXAI_ChatModels implements INode {
         if (cache) obj.cache = cache
         if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
 
-        const model = new ChatXAI(obj)
+        const multiModalOption: IMultiModalOption = {
+            image: {
+                allowImageUploads: allowImageUploads ?? false
+            }
+        }
+
+        const model = new ChatXAI(nodeData.id, obj)
+        model.setMultiModalOption(multiModalOption)
+
         return model
     }
 }
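Note: the node stores the upload toggle on the model via setMultiModalOption, and downstream code can read it back before attaching image content to a message. The sketch below is illustrative only; the helper name and message construction are assumptions, not Flowise's actual upload pipeline. It assumes the FlowiseChatXAI wrapper from the file below and LangChain's standard multimodal content blocks.

import { HumanMessage } from '@langchain/core/messages'
import { ChatXAI } from './FlowiseChatXAI'

// Hypothetical helper: only attach an image block when the node allows image uploads
const toMessage = (model: ChatXAI, text: string, imageBase64?: string): HumanMessage => {
    const allowImages = model.multiModalOption?.image?.allowImageUploads ?? false
    if (allowImages && imageBase64) {
        return new HumanMessage({
            content: [
                { type: 'text', text },
                { type: 'image_url', image_url: { url: `data:image/png;base64,${imageBase64}` } }
            ]
        })
    }
    return new HumanMessage(text)
}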

View File: FlowiseChatXAI.ts (new file)

@@ -0,0 +1,29 @@
+import { ChatXAI as LCChatXAI, ChatXAIInput } from '@langchain/xai'
+import { IMultiModalOption, IVisionChatModal } from '../../../src'
+
+export class ChatXAI extends LCChatXAI implements IVisionChatModal {
+    configuredModel: string
+    configuredMaxToken?: number
+    multiModalOption: IMultiModalOption
+    id: string
+
+    constructor(id: string, fields?: ChatXAIInput) {
+        super(fields)
+        this.id = id
+        this.configuredModel = fields?.model ?? ''
+        this.configuredMaxToken = fields?.maxTokens
+    }
+
+    revertToOriginalModel(): void {
+        this.modelName = this.configuredModel
+        this.maxTokens = this.configuredMaxToken
+    }
+
+    setMultiModalOption(multiModalOption: IMultiModalOption): void {
+        this.multiModalOption = multiModalOption
+    }
+
+    setVisionModel(): void {
+        // pass
+    }
+}
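For reference, a minimal sketch of exercising the wrapper on its own, outside the node's init. The ChatXAIInput field names (apiKey, model, maxTokens) follow @langchain/xai's input type, and the node id and model name are placeholders.

import { ChatXAI } from './FlowiseChatXAI'

// Construct with a Flowise node id (placeholder) plus the usual @langchain/xai fields
const model = new ChatXAI('chatXAI_0', {
    apiKey: process.env.XAI_API_KEY,
    model: 'grok-4', // remembered as configuredModel for later restoration
    maxTokens: 1024
})

// Enable image uploads for this instance, mirroring what the node's init does
model.setMultiModalOption({ image: { allowImageUploads: true } })

// If a vision-specific call temporarily changes the model, the original settings can be restored
model.revertToOriginalModel()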