Multimodal: fixes for cyclic (circular) dependencies during LangSmith analysis
This commit is contained in:
parent
52ffa1772b
commit
10fc1bf08d
|
|
@ -39,10 +39,9 @@ export class ChatOpenAI extends LangchainChatOpenAI {
|
|||
return super.generate(messages, options, callbacks)
|
||||
}
|
||||
|
||||
private async injectMultiModalMessages(messages: BaseMessageLike[][], nodeOptions: MultiModalOptions) {
|
||||
const nodeData = nodeOptions.nodeData
|
||||
const optionsData = nodeOptions.nodeOptions
|
||||
const messageContent = addImagesToMessages(nodeData, optionsData, this.multiModalOption)
|
||||
private async injectMultiModalMessages(messages: BaseMessageLike[][], options: MultiModalOptions) {
|
||||
const optionsData = options.nodeOptions
|
||||
const messageContent = addImagesToMessages(optionsData, this.multiModalOption)
|
||||
if (messageContent?.length) {
|
||||
if (messages[0].length > 0 && messages[0][messages[0].length - 1] instanceof HumanMessage) {
|
||||
// Change model to gpt-4-vision
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import { ICommonObject, IFileUpload, IMultiModalOption, INodeData, MessageContentImageUrl } from './Interface'
|
||||
import { ChatOpenAI as LangchainChatOpenAI } from 'langchain/chat_models/openai'
|
||||
import path from 'path'
|
||||
import { getStoragePath } from './utils'
|
||||
import fs from 'fs'
|
||||
|
|
@ -10,13 +9,12 @@ import { AgentExecutor as LcAgentExecutor, ChatAgent, RunnableAgent } from 'lang
|
|||
import { AgentExecutor } from './agents'
|
||||
|
||||
export interface MultiModalOptions {
|
||||
nodeData: INodeData
|
||||
nodeOptions: ICommonObject
|
||||
}
|
||||
|
||||
export const injectLLMChainNodeData = (nodeData: INodeData, options: ICommonObject) => {
|
||||
let llmChain = nodeData.instance as LLMChain
|
||||
;(llmChain.llm as ChatOpenAI).lc_kwargs.chainData = { nodeData: nodeData, nodeOptions: options }
|
||||
;(llmChain.llm as ChatOpenAI).lc_kwargs.chainData = { nodeOptions: getUploadsFromOptions(options) }
|
||||
}
|
||||
|
||||
export const injectAgentExecutorNodeData = (agentExecutor: AgentExecutor, nodeData: INodeData, options: ICommonObject) => {
|
||||
|
|
@ -29,59 +27,61 @@ export const injectAgentExecutorNodeData = (agentExecutor: AgentExecutor, nodeDa
|
|||
export const injectLcAgentExecutorNodeData = (agentExecutor: LcAgentExecutor, nodeData: INodeData, options: ICommonObject) => {
|
||||
if (agentExecutor.agent instanceof ChatAgent) {
|
||||
let llmChain = agentExecutor.agent.llmChain as LLMChain
|
||||
;(llmChain.llm as ChatOpenAI).lc_kwargs.chainData = { nodeData: nodeData, nodeOptions: options }
|
||||
;(llmChain.llm as ChatOpenAI).lc_kwargs.chainData = { nodeOptions: getUploadsFromOptions(options) }
|
||||
}
|
||||
}
|
||||
|
||||
export const injectRunnableNodeData = (runnableSequence: RunnableSequence, nodeData: INodeData, options: ICommonObject) => {
|
||||
runnableSequence.steps.forEach((step) => {
|
||||
if (step instanceof ChatOpenAI) {
|
||||
;(step as ChatOpenAI).lc_kwargs.chainData = { nodeData: nodeData, nodeOptions: options }
|
||||
;(step as ChatOpenAI).lc_kwargs.chainData = { nodeOptions: getUploadsFromOptions(options) }
|
||||
}
|
||||
|
||||
if (step instanceof RunnableBinding) {
|
||||
if ((step as RunnableBinding<any, any>).bound instanceof ChatOpenAI) {
|
||||
;((step as RunnableBinding<any, any>).bound as ChatOpenAI).lc_kwargs.chainData = {
|
||||
nodeData: nodeData,
|
||||
nodeOptions: options
|
||||
nodeOptions: getUploadsFromOptions(options)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export const addImagesToMessages = (
|
||||
nodeData: INodeData,
|
||||
options: ICommonObject,
|
||||
multiModalOption?: IMultiModalOption
|
||||
): MessageContentImageUrl[] => {
|
||||
const imageContent: MessageContentImageUrl[] = []
|
||||
let model = nodeData.inputs?.model
|
||||
const getUploadsFromOptions = (options: ICommonObject): ICommonObject => {
|
||||
if (options?.uploads) {
|
||||
return {
|
||||
uploads: options.uploads,
|
||||
chatflowid: options.chatflowid,
|
||||
chatId: options.chatId
|
||||
}
|
||||
}
|
||||
return {}
|
||||
}
|
||||
|
||||
export const addImagesToMessages = (options: ICommonObject, multiModalOption?: IMultiModalOption): MessageContentImageUrl[] => {
|
||||
const imageContent: MessageContentImageUrl[] = []
|
||||
|
||||
if (model instanceof LangchainChatOpenAI && multiModalOption) {
|
||||
// Image Uploaded
|
||||
if (multiModalOption.image && multiModalOption.image.allowImageUploads && options?.uploads && options?.uploads.length > 0) {
|
||||
if (multiModalOption?.image && multiModalOption?.image.allowImageUploads && options?.uploads && options?.uploads.length > 0) {
|
||||
const imageUploads = getImageUploads(options.uploads)
|
||||
for (const upload of imageUploads) {
|
||||
let bf = upload.data
|
||||
if (upload.type == 'stored-file') {
|
||||
const filePath = path.join(getStoragePath(), options.chatflowid, options.chatId, upload.name)
|
||||
|
||||
// as the image is stored in the server, read the file and convert it to base64
|
||||
const contents = fs.readFileSync(filePath)
|
||||
bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64')
|
||||
let bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64')
|
||||
|
||||
imageContent.push({
|
||||
type: 'image_url',
|
||||
image_url: {
|
||||
url: bf,
|
||||
detail: multiModalOption.image.imageResolution ?? 'low'
|
||||
detail: multiModalOption?.image.imageResolution ?? 'low'
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return imageContent
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue