Merge branch 'main' into fix/stripe-issues

This commit is contained in:
Ong Chung Yau 2025-07-14 22:26:37 +08:00 committed by GitHub
commit 370c55aa78
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 52 additions and 4 deletions

View File

@ -1,7 +1,8 @@
import { BaseCache } from '@langchain/core/caches' import { BaseCache } from '@langchain/core/caches'
import { ChatXAI, ChatXAIInput } from '@langchain/xai' import { ChatXAIInput } from '@langchain/xai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatXAI } from './FlowiseChatXAI'
class ChatXAI_ChatModels implements INode { class ChatXAI_ChatModels implements INode {
label: string label: string
@ -18,7 +19,7 @@ class ChatXAI_ChatModels implements INode {
constructor() { constructor() {
this.label = 'ChatXAI' this.label = 'ChatXAI'
this.name = 'chatXAI' this.name = 'chatXAI'
this.version = 1.0 this.version = 2.0
this.type = 'ChatXAI' this.type = 'ChatXAI'
this.icon = 'xai.png' this.icon = 'xai.png'
this.category = 'Chat Models' this.category = 'Chat Models'
@ -74,6 +75,15 @@ class ChatXAI_ChatModels implements INode {
step: 1, step: 1,
optional: true, optional: true,
additionalParams: true additionalParams: true
},
{
label: 'Allow Image Uploads',
name: 'allowImageUploads',
type: 'boolean',
description:
'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
default: false,
optional: true
} }
] ]
} }
@ -84,6 +94,7 @@ class ChatXAI_ChatModels implements INode {
const modelName = nodeData.inputs?.modelName as string const modelName = nodeData.inputs?.modelName as string
const maxTokens = nodeData.inputs?.maxTokens as string const maxTokens = nodeData.inputs?.maxTokens as string
const streaming = nodeData.inputs?.streaming as boolean const streaming = nodeData.inputs?.streaming as boolean
const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
const credentialData = await getCredentialData(nodeData.credential ?? '', options) const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const xaiApiKey = getCredentialParam('xaiApiKey', credentialData, nodeData) const xaiApiKey = getCredentialParam('xaiApiKey', credentialData, nodeData)
@ -97,7 +108,15 @@ class ChatXAI_ChatModels implements INode {
if (cache) obj.cache = cache if (cache) obj.cache = cache
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
const model = new ChatXAI(obj) const multiModalOption: IMultiModalOption = {
image: {
allowImageUploads: allowImageUploads ?? false
}
}
const model = new ChatXAI(nodeData.id, obj)
model.setMultiModalOption(multiModalOption)
return model return model
} }
} }

View File

@ -0,0 +1,29 @@
import { ChatXAI as LCChatXAI, ChatXAIInput } from '@langchain/xai'
import { IMultiModalOption, IVisionChatModal } from '../../../src'
/**
 * Flowise wrapper around LangChain's ChatXAI that implements the
 * IVisionChatModal contract so the node can accept image uploads.
 *
 * The model name and max-token limit supplied at construction are captured
 * so they can be restored via revertToOriginalModel().
 */
export class ChatXAI extends LCChatXAI implements IVisionChatModal {
    // Model name as originally configured on the node ('' when none was given)
    configuredModel: string
    // Max-token limit as originally configured (undefined when not set)
    configuredMaxToken?: number
    // Image-upload configuration; populated via setMultiModalOption()
    multiModalOption: IMultiModalOption
    // Flowise node id this model instance belongs to
    id: string

    constructor(id: string, fields?: ChatXAIInput) {
        super(fields)
        const { model, maxTokens } = fields ?? {}
        this.id = id
        this.configuredModel = model ?? ''
        this.configuredMaxToken = maxTokens
    }

    /** Restore the originally configured model name and token limit. */
    revertToOriginalModel(): void {
        this.modelName = this.configuredModel
        this.maxTokens = this.configuredMaxToken
    }

    /** Store the multimodal (image upload) option for this node. */
    setMultiModalOption(multiModalOption: IMultiModalOption): void {
        this.multiModalOption = multiModalOption
    }

    /**
     * Intentionally a no-op.
     * NOTE(review): presumably the configured xAI model already handles
     * vision input, so no model switch is needed — confirm against callers.
     */
    setVisionModel(): void {
        // intentionally empty
    }
}