feat: Add conversation history selection options to ConditionAgent node (#3719)

* feat: Enhance ConditionAgent with conversation history selection options

- Added a new parameter `conversationHistorySelection` to allow users to choose which messages from the conversation history to include in prompts.
- Options include: User Question, Last Conversation Message, All Conversation Messages, and Empty (see the sketch after this list).
- Default selection is set to 'All Conversation Messages' for improved context management in sequential LLM and Agent nodes.
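
For illustration only, a minimal TypeScript sketch of the values the new option accepts and the fallback to 'all_messages' when nothing is selected. The union type and the `nodeInputs` object here are hypothetical stand-ins, not the actual `ConversationHistorySelection` export or the node's real `nodeData.inputs` shape:

```typescript
// Hypothetical stand-in for the exported ConversationHistorySelection type.
type ConversationHistorySelection = 'user_question' | 'last_message' | 'all_messages' | 'empty'

// Hypothetical stand-in for nodeData.inputs on the ConditionAgent node.
const nodeInputs: { conversationHistorySelection?: ConversationHistorySelection } = {
    conversationHistorySelection: 'last_message'
}

// Mirrors the fallback used in runCondition: default to the full history.
const historySelection: ConversationHistorySelection =
    nodeInputs.conversationHistorySelection ?? 'all_messages'
```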

* Bump version from 2.0 to 3.0
Author: Jean Ibarz, 2024-12-19 00:27:01 +01:00 (committed by GitHub)
parent 1b48d564f9
commit c809f4165a
1 changed file with 42 additions and 1 deletion

@@ -6,6 +6,7 @@ import { RunnableSequence, RunnablePassthrough, RunnableConfig } from '@langchai
 import { BaseMessage } from '@langchain/core/messages'
 import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import {
+    ConversationHistorySelection,
     ICommonObject,
     IDatabaseEntity,
     INode,
@@ -23,6 +24,7 @@ import {
     customGet,
     getVM,
     transformObjectPropertyToFunction,
+    filterConversationHistory,
     restructureMessages
 } from '../commonUtils'
 import { ChatGoogleGenerativeAI } from '../../chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI'
@@ -149,7 +151,7 @@ class ConditionAgent_SeqAgents implements INode {
     constructor() {
         this.label = 'Condition Agent'
         this.name = 'seqConditionAgent'
-        this.version = 2.0
+        this.version = 3.0
         this.type = 'ConditionAgent'
         this.icon = 'condition.svg'
         this.category = 'Sequential Agents'
@@ -185,6 +187,42 @@ class ConditionAgent_SeqAgents implements INode {
                 additionalParams: true,
                 optional: true
             },
+            {
+                label: 'Conversation History',
+                name: 'conversationHistorySelection',
+                type: 'options',
+                options: [
+                    {
+                        label: 'User Question',
+                        name: 'user_question',
+                        description: 'Use the user question from the historical conversation messages as input.'
+                    },
+                    {
+                        label: 'Last Conversation Message',
+                        name: 'last_message',
+                        description: 'Use the last conversation message from the historical conversation messages as input.'
+                    },
+                    {
+                        label: 'All Conversation Messages',
+                        name: 'all_messages',
+                        description: 'Use all conversation messages from the historical conversation messages as input.'
+                    },
+                    {
+                        label: 'Empty',
+                        name: 'empty',
+                        description:
+                            'Do not use any messages from the conversation history. ' +
+                            'Ensure to use either System Prompt, Human Prompt, or Messages History.'
+                    }
+                ],
+                default: 'all_messages',
+                optional: true,
+                description:
+                    'Select which messages from the conversation history to include in the prompt. ' +
+                    'The selected messages will be inserted between the System Prompt (if defined) and ' +
+                    'Human Prompt.',
+                additionalParams: true
+            },
             {
                 label: 'Human Prompt',
                 name: 'humanMessagePrompt',
@@ -481,6 +519,9 @@
         })
     }
+    const historySelection = (nodeData.inputs?.conversationHistorySelection || 'all_messages') as ConversationHistorySelection
+    // @ts-ignore
+    state.messages = filterConversationHistory(historySelection, input, state)
     // @ts-ignore
     state.messages = restructureMessages(model, state)
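
The `filterConversationHistory` helper comes from `../commonUtils` and its implementation is not part of this diff. Below is a rough, hypothetical sketch of the behavior the option descriptions above suggest, assuming the sequential-agent state keeps its history as a `BaseMessage[]` on `state.messages`; the real helper's signature and edge-case handling may differ:

```typescript
import { BaseMessage, HumanMessage } from '@langchain/core/messages'

// Hypothetical stand-in for the exported ConversationHistorySelection type.
type ConversationHistorySelection = 'user_question' | 'last_message' | 'all_messages' | 'empty'

// Sketch only: not the actual commonUtils implementation.
function filterConversationHistorySketch(
    selection: ConversationHistorySelection,
    input: string,
    state: { messages: BaseMessage[] } // assumed shape of the sequential-agent state
): BaseMessage[] {
    switch (selection) {
        case 'user_question':
            // Keep only the current user question, wrapped as a human message.
            return [new HumanMessage(input)]
        case 'last_message': {
            // Keep only the most recent message from the stored history.
            const last = state.messages[state.messages.length - 1]
            return last ? [last] : []
        }
        case 'empty':
            // Drop the history entirely; the prompt must come from the System/Human Prompt.
            return []
        case 'all_messages':
        default:
            // Default: pass the full history through unchanged.
            return state.messages
    }
}
```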