Feature/add max iterations to agents (#2161)
add max iterations to agents
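The change is identical across all eight agent nodes touched below. A condensed sketch of the pattern, assuming the node's existing `nodeData`, `agent`, and `tools` are in scope as they are in the files shown in the hunks (types and imports come from the surrounding node file and are omitted here):

    // 1. Expose the option in the node UI (object appended to this.inputs in each constructor)
    const maxIterationsParam = {
        label: 'Max Iterations',
        name: 'maxIterations',
        type: 'number',
        optional: true,
        additionalParams: true
    }

    // 2. Read the value (number inputs arrive as strings) and forward it to the AgentExecutor,
    //    leaving the executor's default iteration limit in place when the field is empty
    const maxIterations = nodeData.inputs?.maxIterations as string

    const executor = new AgentExecutor({
        agent,
        tools,
        verbose: process.env.DEBUG === 'true',
        maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
    })

When the field is left blank, `maxIterations` resolves to `undefined` and the executor keeps LangChain's default iteration limit.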
parent e7a58fc700
commit a82dd93c6c
@@ -86,6 +86,13 @@ class ConversationalAgent_Agents implements INode {
                 type: 'Moderation',
                 optional: true,
                 list: true
             },
+            {
+                label: 'Max Iterations',
+                name: 'maxIterations',
+                type: 'number',
+                optional: true,
+                additionalParams: true
+            }
         ]
         this.sessionId = fields?.sessionId
@@ -176,6 +183,7 @@ const prepareAgent = async (
     flowObj: { sessionId?: string; chatId?: string; input?: string }
 ) => {
     const model = nodeData.inputs?.model as BaseChatModel
+    const maxIterations = nodeData.inputs?.maxIterations as string
     let tools = nodeData.inputs?.tools as Tool[]
     tools = flatten(tools)
     const memory = nodeData.inputs?.memory as FlowiseMemory
@@ -247,7 +255,8 @@ const prepareAgent = async (
         sessionId: flowObj?.sessionId,
         chatId: flowObj?.chatId,
         input: flowObj?.input,
-        verbose: process.env.DEBUG === 'true'
+        verbose: process.env.DEBUG === 'true',
+        maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
     })

     return executor
@@ -71,6 +71,13 @@ class ConversationalRetrievalAgent_Agents implements INode {
                 type: 'Moderation',
                 optional: true,
                 list: true
             },
+            {
+                label: 'Max Iterations',
+                name: 'maxIterations',
+                type: 'number',
+                optional: true,
+                additionalParams: true
+            }
         ]
         this.sessionId = fields?.sessionId
@@ -131,6 +138,7 @@ const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId
     const model = nodeData.inputs?.model as ChatOpenAI
     const memory = nodeData.inputs?.memory as FlowiseMemory
     const systemMessage = nodeData.inputs?.systemMessage as string
+    const maxIterations = nodeData.inputs?.maxIterations as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
@@ -168,7 +176,8 @@ const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId
         chatId: flowObj?.chatId,
         input: flowObj?.input,
         returnIntermediateSteps: true,
-        verbose: process.env.DEBUG === 'true' ? true : false
+        verbose: process.env.DEBUG === 'true' ? true : false,
+        maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
     })

     return executor
@@ -58,6 +58,13 @@ class MRKLAgentChat_Agents implements INode {
                 type: 'Moderation',
                 optional: true,
                 list: true
             },
+            {
+                label: 'Max Iterations',
+                name: 'maxIterations',
+                type: 'number',
+                optional: true,
+                additionalParams: true
+            }
         ]
         this.sessionId = fields?.sessionId
@@ -69,6 +76,7 @@ class MRKLAgentChat_Agents implements INode {

     async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | object> {
         const memory = nodeData.inputs?.memory as FlowiseMemory
+        const maxIterations = nodeData.inputs?.maxIterations as string
         const model = nodeData.inputs?.model as BaseChatModel
         let tools = nodeData.inputs?.tools as Tool[]
         const moderations = nodeData.inputs?.inputModeration as Moderation[]
@@ -120,7 +128,8 @@ class MRKLAgentChat_Agents implements INode {
         const executor = new AgentExecutor({
             agent,
             tools,
-            verbose: process.env.DEBUG === 'true'
+            verbose: process.env.DEBUG === 'true',
+            maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
         })

         const callbacks = await additionalCallbacks(nodeData, options)
@@ -50,6 +50,13 @@ class MRKLAgentLLM_Agents implements INode {
                 type: 'Moderation',
                 optional: true,
                 list: true
             },
+            {
+                label: 'Max Iterations',
+                name: 'maxIterations',
+                type: 'number',
+                optional: true,
+                additionalParams: true
+            }
         ]
     }
@@ -60,6 +67,7 @@ class MRKLAgentLLM_Agents implements INode {

     async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | object> {
         const model = nodeData.inputs?.model as BaseLanguageModel
+        const maxIterations = nodeData.inputs?.maxIterations as string
         let tools = nodeData.inputs?.tools as Tool[]
         const moderations = nodeData.inputs?.inputModeration as Moderation[]

@@ -87,7 +95,8 @@ class MRKLAgentLLM_Agents implements INode {
         const executor = new AgentExecutor({
             agent,
             tools,
-            verbose: process.env.DEBUG === 'true' ? true : false
+            verbose: process.env.DEBUG === 'true' ? true : false,
+            maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
         })

         const callbacks = await additionalCallbacks(nodeData, options)
@@ -69,6 +69,13 @@ class MistralAIToolAgent_Agents implements INode {
                 type: 'Moderation',
                 optional: true,
                 list: true
             },
+            {
+                label: 'Max Iterations',
+                name: 'maxIterations',
+                type: 'number',
+                optional: true,
+                additionalParams: true
+            }
         ]
         this.sessionId = fields?.sessionId
@@ -157,6 +164,7 @@ class MistralAIToolAgent_Agents implements INode {
 const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
     const model = nodeData.inputs?.model as ChatOpenAI
     const memory = nodeData.inputs?.memory as FlowiseMemory
+    const maxIterations = nodeData.inputs?.maxIterations as string
     const systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
@@ -194,7 +202,8 @@ const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId
         sessionId: flowObj?.sessionId,
         chatId: flowObj?.chatId,
         input: flowObj?.input,
-        verbose: process.env.DEBUG === 'true' ? true : false
+        verbose: process.env.DEBUG === 'true' ? true : false,
+        maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
     })

     return executor
@@ -68,6 +68,13 @@ class OpenAIFunctionAgent_Agents implements INode {
                 type: 'Moderation',
                 optional: true,
                 list: true
             },
+            {
+                label: 'Max Iterations',
+                name: 'maxIterations',
+                type: 'number',
+                optional: true,
+                additionalParams: true
+            }
         ]
         this.sessionId = fields?.sessionId
@@ -155,6 +162,7 @@ class OpenAIFunctionAgent_Agents implements INode {

 const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
     const model = nodeData.inputs?.model as ChatOpenAI
+    const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
     const systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
@@ -193,7 +201,8 @@ const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId
         sessionId: flowObj?.sessionId,
         chatId: flowObj?.chatId,
         input: flowObj?.input,
-        verbose: process.env.DEBUG === 'true' ? true : false
+        verbose: process.env.DEBUG === 'true' ? true : false,
+        maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
     })

     return executor
@@ -69,6 +69,13 @@ class OpenAIToolAgent_Agents implements INode {
                 type: 'Moderation',
                 optional: true,
                 list: true
             },
+            {
+                label: 'Max Iterations',
+                name: 'maxIterations',
+                type: 'number',
+                optional: true,
+                additionalParams: true
+            }
         ]
         this.sessionId = fields?.sessionId
@@ -156,6 +163,7 @@ class OpenAIToolAgent_Agents implements INode {

 const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
     const model = nodeData.inputs?.model as ChatOpenAI
+    const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
     const systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
@@ -192,7 +200,8 @@ const prepareAgent = (nodeData: INodeData, flowObj: { sessionId?: string; chatId
         sessionId: flowObj?.sessionId,
         chatId: flowObj?.chatId,
         input: flowObj?.input,
-        verbose: process.env.DEBUG === 'true' ? true : false
+        verbose: process.env.DEBUG === 'true' ? true : false,
+        maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
     })

     return executor
@@ -93,6 +93,13 @@ class XMLAgent_Agents implements INode {
                 type: 'Moderation',
                 optional: true,
                 list: true
             },
+            {
+                label: 'Max Iterations',
+                name: 'maxIterations',
+                type: 'number',
+                optional: true,
+                additionalParams: true
+            }
         ]
         this.sessionId = fields?.sessionId
@@ -179,6 +186,7 @@ class XMLAgent_Agents implements INode {

 const prepareAgent = async (nodeData: INodeData, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
     const model = nodeData.inputs?.model as BaseChatModel
+    const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
     const systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
@@ -233,7 +241,8 @@ const prepareAgent = async (nodeData: INodeData, flowObj: { sessionId?: string;
         chatId: flowObj?.chatId,
         input: flowObj?.input,
         isXML: true,
-        verbose: process.env.DEBUG === 'true' ? true : false
+        verbose: process.env.DEBUG === 'true' ? true : false,
+        maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
     })

     return executor