Chore/API for AgentflowV2 (#4696)
* Enhancement: Introduce prepended chat history handling in Agent and LLM nodes.
  - Added support for `prependedChatHistory` in both `Agent` and `LLM` classes to allow for initial message context.
  - Implemented validation of the history schema in the execution flow to ensure a proper format.
  - Refactored utility functions to include JSON sanitization and validation methods for improved data handling.
* Update prediction swagger.
Parent: 035b5555a9 · Commit: 543800562e
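For context, here is a minimal sketch of how a client might exercise the new prepended-history support through the prediction API documented below. The host, API key, and flow ID are placeholders, and the `/api/v1/prediction/{id}` route and field names are taken from the swagger changes in this commit; treat it as an illustration rather than the definitive client.

    // Hypothetical client call (Node 18+, global fetch): prepend prior turns so the
    // first Agent/LLM node starts with context. Roles are 'apiMessage' | 'userMessage'.
    const response = await fetch('https://your-flowise-host/api/v1/prediction/your-flow-id', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            Authorization: 'Bearer <your-api-key>' // only if the flow requires an API key
        },
        body: JSON.stringify({
            question: 'What did I just tell you my name was?',
            history: [
                { role: 'userMessage', content: "Hi, my name is Sarah and I'm learning about AI" },
                { role: 'apiMessage', content: 'Nice to meet you, Sarah!' }
            ],
            overrideConfig: { sessionId: 'user-session-123' },
            streaming: false
        })
    })
    const prediction = await response.json()
    console.log(prediction.text)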
@@ -1216,15 +1216,18 @@ paths:
       security:
         - bearerAuth: []
       operationId: createPrediction
-      summary: Create a new prediction
-      description: Create a new prediction
+      summary: Send message to flow and get AI response
+      description: |
+        Send a message to your flow and receive an AI-generated response. This is the primary endpoint for interacting with your flows and assistants.
+
+        **Authentication**: API key may be required depending on flow settings.
       parameters:
         - in: path
           name: id
           required: true
           schema:
             type: string
-          description: Chatflow ID
+          description: Flow ID - the unique identifier of your flow
+          example: 'your-flow-id'
       requestBody:
         content:
           application/json:
@@ -1236,24 +1239,36 @@ paths:
               properties:
                 question:
                   type: string
-                  description: Question to ask during the prediction process
+                  description: Question/message to send to the flow
+                  example: 'Analyze this uploaded file and summarize its contents'
                 files:
                   type: array
                   items:
                     type: string
                     format: binary
-                  description: Files to be uploaded
-                modelName:
+                  description: Files to be uploaded (images, audio, documents, etc.)
+                streaming:
+                  type: boolean
+                  description: Enable streaming responses
+                  default: false
+                overrideConfig:
                   type: string
-                  nullable: true
-                  example: ''
-                  description: Other override configurations
+                  description: JSON string of configuration overrides
+                  example: '{"sessionId":"user-123","temperature":0.7}'
+                history:
+                  type: string
+                  description: JSON string of conversation history
+                  example: '[{"role":"userMessage","content":"Hello"},{"role":"apiMessage","content":"Hi there!"}]'
+                humanInput:
+                  type: string
+                  description: JSON string of human input for resuming execution
+                  example: '{"type":"proceed","feedback":"Continue with the plan"}'
               required:
                 - question
         required: true
       responses:
         '200':
-          description: Prediction created successfully
+          description: Successful prediction response
          content:
            application/json:
              schema:
@@ -1261,45 +1276,106 @@ paths:
               properties:
                 text:
                   type: string
-                  description: The result of the prediction
+                  description: The AI-generated response text
+                  example: 'Artificial intelligence (AI) is a branch of computer science that focuses on creating systems capable of performing tasks that typically require human intelligence.'
                 json:
                   type: object
-                  description: The result of the prediction in JSON format if available
+                  description: The result in JSON format if available (for structured outputs)
+                  nullable: true
                 question:
                   type: string
-                  description: The question asked during the prediction process
+                  description: The original question/message sent to the flow
+                  example: 'What is artificial intelligence?'
                 chatId:
                   type: string
-                  description: The chat ID associated with the prediction
+                  description: Unique identifier for the chat session
+                  example: 'chat-12345'
                 chatMessageId:
                   type: string
-                  description: The chat message ID associated with the prediction
+                  description: Unique identifier for this specific message
+                  example: 'msg-67890'
                 sessionId:
                   type: string
-                  description: The session ID associated with the prediction
+                  description: Session identifier for conversation continuity
+                  example: 'user-session-123'
+                  nullable: true
                 memoryType:
                   type: string
-                  description: The memory type associated with the prediction
+                  description: Type of memory used for conversation context
+                  example: 'Buffer Memory'
+                  nullable: true
                 sourceDocuments:
                   type: array
+                  description: Documents retrieved from vector store (if RAG is enabled)
                   items:
                     $ref: '#/components/schemas/Document'
+                  nullable: true
                 usedTools:
                   type: array
+                  description: Tools that were invoked during the response generation
                   items:
                     $ref: '#/components/schemas/UsedTool'
-                fileAnnotations:
-                  type: array
-                  items:
-                    $ref: '#/components/schemas/FileAnnotation'
+                  nullable: true
         '400':
-          description: Invalid input provided
+          description: Bad Request - Invalid input provided or request format is incorrect
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  error:
+                    type: string
+                    example: 'Invalid request format. Check required fields and parameter types.'
+        '401':
+          description: Unauthorized - API key required or invalid
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  error:
+                    type: string
+                    example: 'Unauthorized access. Please verify your API key.'
         '404':
-          description: Chatflow not found
+          description: Not Found - Chatflow with specified ID does not exist
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  error:
+                    type: string
+                    example: 'Chatflow not found. Please verify the chatflow ID.'
+        '413':
+          description: Payload Too Large - Request payload exceeds size limits
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  error:
+                    type: string
+                    example: 'Request payload too large. Please reduce file sizes or split large requests.'
         '422':
-          description: Validation error
+          description: Validation Error - Request validation failed
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  error:
+                    type: string
+                    example: 'Validation failed. Check parameter requirements and data types.'
         '500':
-          description: Internal server error
+          description: Internal Server Error - Flow configuration or execution error
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  error:
+                    type: string
+                    example: 'Internal server error. Check flow configuration and node settings.'
   /tools:
     post:
       tags:
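Since the request body above carries `history`, `overrideConfig`, and `humanInput` as JSON strings alongside binary `files`, a hedged upload sketch may help. Field names mirror the schema above; the runtime (Node 18+ with global fetch/FormData/Blob), host, flow ID, and file path are assumptions for the example.

    // Hypothetical file-upload request: complex fields are sent as JSON strings.
    import { readFile } from 'node:fs/promises'

    const form = new FormData()
    form.append('question', 'Analyze this uploaded file and summarize its contents')
    form.append('files', new Blob([await readFile('./report.pdf')]), 'report.pdf')
    form.append('history', JSON.stringify([{ role: 'userMessage', content: 'Hello' }]))
    form.append('overrideConfig', JSON.stringify({ sessionId: 'user-123' }))

    const res = await fetch('https://your-flowise-host/api/v1/prediction/your-flow-id', {
        method: 'POST',
        body: form
    })
    console.log((await res.json()).text)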
@@ -2011,13 +2087,33 @@ components:
       properties:
         question:
           type: string
-          description: The question being asked
+          description: The question/message to send to the flow
+          example: 'What is artificial intelligence?'
+        form:
+          type: object
+          description: The form object to send to the flow (alternative to question for Agentflow V2)
+          additionalProperties: true
+          example:
+            title: 'Example'
+            count: 1
+        streaming:
+          type: boolean
+          description: Enable streaming responses for real-time output
+          default: false
+          example: false
         overrideConfig:
           type: object
-          description: The configuration to override the default prediction settings (optional)
+          description: Override flow configuration and pass variables at runtime
+          additionalProperties: true
+          example:
+            sessionId: 'user-session-123'
+            temperature: 0.7
+            maxTokens: 500
+            vars:
+              user_name: 'Alice'
         history:
           type: array
-          description: The history messages to be prepended (optional)
+          description: Previous conversation messages for context
           items:
             type: object
             properties:
@@ -2030,8 +2126,14 @@ components:
                type: string
                description: The content of the message
                example: 'Hello, how can I help you?'
+          example:
+            - role: 'apiMessage'
+              content: "Hello! I'm an AI assistant. How can I help you today?"
+            - role: 'userMessage'
+              content: "Hi, my name is Sarah and I'm learning about AI"
         uploads:
           type: array
+          description: Files to upload (images, audio, documents, etc.)
           items:
             type: object
             properties:
@@ -2051,7 +2153,42 @@ components:
              mime:
                type: string
                description: The MIME type of the file or resource
+               enum:
+                 [
+                   'image/png',
+                   'image/jpeg',
+                   'image/jpg',
+                   'image/gif',
+                   'image/webp',
+                   'audio/mp4',
+                   'audio/webm',
+                   'audio/wav',
+                   'audio/mpeg',
+                   'audio/ogg',
+                   'audio/aac'
+                 ]
                example: 'image/png'
+          example:
+            - type: 'file'
+              name: 'example.png'
+              data: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAABjElEQVRIS+2Vv0oDQRDG'
+              mime: 'image/png'
+        humanInput:
+          type: object
+          description: Return human feedback and resume execution from a stopped checkpoint
+          properties:
+            type:
+              type: string
+              enum: [proceed, reject]
+              description: Type of human input response
+              example: 'reject'
+            feedback:
+              type: string
+              description: Feedback to the last output
+              example: 'Include more emoji'
+          example:
+            type: 'reject'
+            feedback: 'Include more emoji'

     Tool:
       type: object
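The `humanInput` schema above pairs with the human-in-the-loop resume logic later in this commit. A hedged sketch of a resume call follows; the host, flow ID, and session ID are placeholders, and sending an empty `question` on resume is an assumption of the example.

    // Hypothetical resume request: send feedback for a flow that stopped at a
    // human-input checkpoint; execution picks up from the stopped node.
    const resumeRes = await fetch('https://your-flowise-host/api/v1/prediction/your-flow-id', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            question: '',
            overrideConfig: { sessionId: 'user-session-123' },
            humanInput: { type: 'reject', feedback: 'Include more emoji' }
        })
    })
    console.log((await resumeRes.json()).text)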
@@ -3,6 +3,7 @@ import {
     ICommonObject,
     IDatabaseEntity,
     IHumanInput,
+    IMessage,
     INode,
     INodeData,
     INodeOptionsValue,
@@ -696,6 +697,7 @@ class Agent_Agentflow implements INode {
         const state = options.agentflowRuntime?.state as ICommonObject
         const pastChatHistory = (options.pastChatHistory as BaseMessageLike[]) ?? []
         const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? []
+        const prependedChatHistory = options.prependedChatHistory as IMessage[]
         const chatId = options.chatId as string

         // Initialize the LLM model instance
@@ -730,6 +732,18 @@ class Agent_Agentflow implements INode {
         // Use to keep track of past messages with image file references
         let pastImageMessagesWithFileRef: BaseMessageLike[] = []

+        // Prepend history ONLY if it is the first node
+        if (prependedChatHistory.length > 0 && !runtimeChatHistory.length) {
+            for (const msg of prependedChatHistory) {
+                const role: string = msg.role === 'apiMessage' ? 'assistant' : 'user'
+                const content: string = msg.content ?? ''
+                messages.push({
+                    role,
+                    content
+                })
+            }
+        }
+
         for (const msg of agentMessages) {
             const role = msg.role
             const content = msg.content
@@ -1,5 +1,5 @@
 import { BaseChatModel } from '@langchain/core/language_models/chat_models'
-import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
+import { ICommonObject, IMessage, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
 import { AIMessageChunk, BaseMessageLike, MessageContentText } from '@langchain/core/messages'
 import { DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt'
 import { z } from 'zod'
@@ -359,6 +359,7 @@ class LLM_Agentflow implements INode {
         const state = options.agentflowRuntime?.state as ICommonObject
         const pastChatHistory = (options.pastChatHistory as BaseMessageLike[]) ?? []
         const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? []
+        const prependedChatHistory = options.prependedChatHistory as IMessage[]
         const chatId = options.chatId as string

         // Initialize the LLM model instance
@@ -382,6 +383,18 @@ class LLM_Agentflow implements INode {
         // Use to keep track of past messages with image file references
         let pastImageMessagesWithFileRef: BaseMessageLike[] = []

+        // Prepend history ONLY if it is the first node
+        if (prependedChatHistory.length > 0 && !runtimeChatHistory.length) {
+            for (const msg of prependedChatHistory) {
+                const role: string = msg.role === 'apiMessage' ? 'assistant' : 'user'
+                const content: string = msg.content ?? ''
+                messages.push({
+                    role,
+                    content
+                })
+            }
+        }
+
         for (const msg of llmMessages) {
             const role = msg.role
             const content = msg.content
|
@ -322,7 +322,7 @@ export interface IOverrideConfig {
|
||||||
label: string
|
label: string
|
||||||
name: string
|
name: string
|
||||||
type: string
|
type: string
|
||||||
schema?: ICommonObject[]
|
schema?: ICommonObject[] | Record<string, string>
|
||||||
}
|
}
|
||||||
|
|
||||||
export type ICredentialDataDecrypted = ICommonObject
|
export type ICredentialDataDecrypted = ICommonObject
|
||||||
|
|
|
||||||
|
|
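The widened `schema` type admits two shapes; a hedged illustration of both follows, with a minimal stand-in type and field names chosen only for the example.

    // Minimal stand-in for the project's ICommonObject, for illustration only
    type ICommonObject = { [key: string]: any }

    // Array form, as produced for array-type inputs:
    const arraySchema: ICommonObject[] = [
        { name: 'url', type: 'string' },
        { name: 'maxDepth', type: 'number' }
    ]

    // Record form, as produced by the new loadConfig branch later in this commit:
    const configSchema: Record<string, string> = {
        temperature: 'number',
        streaming: 'boolean',
        systemMessage: 'string'
    }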
@@ -5,7 +5,7 @@ import { Execution } from '../../database/entities/Execution'
 import { InternalFlowiseError } from '../../errors/internalFlowiseError'
 import { getErrorMessage } from '../../errors/utils'
 import { ExecutionState, IAgentflowExecutedData } from '../../Interface'
-import { _removeCredentialId } from '../../utils/buildAgentflow'
+import { _removeCredentialId } from '../../utils'
 import { getRunningExpressApp } from '../../utils/getRunningExpressApp'

 export interface ExecutionFilters {
@@ -41,7 +41,9 @@ import {
     getStartingNode,
     getTelemetryFlowObj,
     QUESTION_VAR_PREFIX,
-    CURRENT_DATE_TIME_VAR_PREFIX
+    CURRENT_DATE_TIME_VAR_PREFIX,
+    _removeCredentialId,
+    validateHistorySchema
 } from '.'
 import { ChatFlow } from '../database/entities/ChatFlow'
 import { Variable } from '../database/entities/Variable'
@@ -105,6 +107,7 @@ interface IExecuteNodeParams {
     evaluationRunId?: string
     isInternal: boolean
     pastChatHistory: IMessage[]
+    prependedChatHistory: IMessage[]
     appDataSource: DataSource
     usageCacheManager: UsageCacheManager
     telemetry: Telemetry
@@ -203,21 +206,6 @@ const updateExecution = async (appDataSource: DataSource, executionId: string, w
     await appDataSource.getRepository(Execution).save(execution)
 }

-export const _removeCredentialId = (obj: any): any => {
-    if (!obj || typeof obj !== 'object') return obj
-
-    if (Array.isArray(obj)) {
-        return obj.map((item) => _removeCredentialId(item))
-    }
-
-    const newObj: Record<string, any> = {}
-    for (const [key, value] of Object.entries(obj)) {
-        if (key === 'FLOWISE_CREDENTIAL_ID') continue
-        newObj[key] = _removeCredentialId(value)
-    }
-    return newObj
-}
-
 export const resolveVariables = async (
     reactFlowNodeData: INodeData,
     question: string,
@@ -820,6 +808,7 @@ const executeNode = async ({
     evaluationRunId,
     parentExecutionId,
     pastChatHistory,
+    prependedChatHistory,
     appDataSource,
     usageCacheManager,
     telemetry,
@@ -927,6 +916,7 @@
        humanInputAction = lastNodeOutput?.humanInputAction
    }

+   // This is when human in the loop is resumed
    if (humanInput && nodeId === humanInput.startNodeId) {
        reactFlowNodeData.inputs = { ...reactFlowNodeData.inputs, humanInput }
        // Remove the stopped humanInput from execution data
@@ -973,6 +963,7 @@
        isLastNode,
        sseStreamer,
        pastChatHistory,
+       prependedChatHistory,
        agentflowRuntime,
        abortController,
        analyticHandlers,
@@ -1297,6 +1288,17 @@ export const executeAgentFlow = async ({
    const chatflowid = chatflow.id
    const sessionId = incomingInput.sessionId ?? chatId
    const humanInput: IHumanInput | undefined = incomingInput.humanInput
+
+   // Validate history schema if provided
+   if (incomingInput.history && incomingInput.history.length > 0) {
+       if (!validateHistorySchema(incomingInput.history)) {
+           throw new Error(
+               'Invalid history format. Each history item must have: ' + '{ role: "apiMessage" | "userMessage", content: string }'
+           )
+       }
+   }
+
+   const prependedChatHistory = incomingInput.history ?? []
    const apiMessageId = uuidv4()

    /*** Get chatflows and prepare data ***/
@@ -1413,35 +1415,90 @@ export const executeAgentFlow = async ({
    }

    // If it is human input, find the last checkpoint and resume
-   if (humanInput?.startNodeId) {
+   if (humanInput) {
        if (!previousExecution) {
            throw new Error(`No previous execution found for session ${sessionId}`)
        }

-       if (previousExecution.state !== 'STOPPED') {
+       let executionData = JSON.parse(previousExecution.executionData) as IAgentflowExecutedData[]
+       let shouldUpdateExecution = false
+
+       // Handle different execution states
+       if (previousExecution.state === 'STOPPED') {
+           // Normal case - execution is stopped and ready to resume
+           logger.debug(`  ✅ Previous execution is in STOPPED state, ready to resume`)
+       } else if (previousExecution.state === 'ERROR') {
+           // Check if second-to-last execution item is STOPPED and last is ERROR
+           if (executionData.length >= 2) {
+               const lastItem = executionData[executionData.length - 1]
+               const secondLastItem = executionData[executionData.length - 2]
+
+               if (lastItem.status === 'ERROR' && secondLastItem.status === 'STOPPED') {
+                   logger.debug(`  🔄 Found ERROR after STOPPED - removing last error item to allow retry`)
+                   logger.debug(`  Removing: ${lastItem.nodeId} (${lastItem.nodeLabel}) - ${lastItem.data?.error || 'Unknown error'}`)
+
+                   // Remove the last ERROR item
+                   executionData = executionData.slice(0, -1)
+                   shouldUpdateExecution = true
+               } else {
+                   throw new Error(
+                       `Cannot resume execution ${previousExecution.id} because it is in 'ERROR' state ` +
+                           `and the previous item is not in 'STOPPED' state. Only executions that ended with a ` +
+                           `STOPPED state (or ERROR after STOPPED) can be resumed.`
+                   )
+               }
+           } else {
+               throw new Error(
+                   `Cannot resume execution ${previousExecution.id} because it is in 'ERROR' state ` +
+                       `with insufficient execution data. Only executions in 'STOPPED' state can be resumed.`
+               )
+           }
+       } else {
            throw new Error(
                `Cannot resume execution ${previousExecution.id} because it is in '${previousExecution.state}' state. ` +
-                   `Only executions in 'STOPPED' state can be resumed.`
+                   `Only executions in 'STOPPED' state (or 'ERROR' after 'STOPPED') can be resumed.`
            )
        }

-       startingNodeIds.push(humanInput.startNodeId)
-       checkForMultipleStartNodes(startingNodeIds, isRecursive, nodes)
+       let startNodeId = humanInput.startNodeId

-       const executionData = JSON.parse(previousExecution.executionData) as IAgentflowExecutedData[]
-       // Verify that the humanInputAgentflow node exists in previous execution
-       const humanInputNodeExists = executionData.some((data) => data.nodeId === humanInput.startNodeId)
-       if (!humanInputNodeExists) {
+       // If startNodeId is not provided, find the last node with STOPPED status from execution data
+       if (!startNodeId) {
+           // Search in reverse order to find the last (most recent) STOPPED node
+           const stoppedNode = [...executionData].reverse().find((data) => data.status === 'STOPPED')
+
+           if (!stoppedNode) {
+               throw new Error('No stopped node found in previous execution data to resume from')
+           }
+
+           startNodeId = stoppedNode.nodeId
+           logger.debug(`  🔍 Auto-detected stopped node to resume from: ${startNodeId} (${stoppedNode.nodeLabel})`)
+       }
+
+       // Verify that the node exists in previous execution
+       const nodeExists = executionData.some((data) => data.nodeId === startNodeId)
+
+       if (!nodeExists) {
            throw new Error(
-               `Human Input node ${humanInput.startNodeId} not found in previous execution. ` +
+               `Node ${startNodeId} not found in previous execution. ` +
                    `This could indicate an invalid resume attempt or a modified flow.`
            )
        }

+       startingNodeIds.push(startNodeId)
+       checkForMultipleStartNodes(startingNodeIds, isRecursive, nodes)
+
        agentFlowExecutedData.push(...executionData)

+       // Update execution data if we removed an error item
+       if (shouldUpdateExecution) {
+           logger.debug(`  📝 Updating execution data after removing error item`)
+           await updateExecution(appDataSource, previousExecution.id, workspaceId, {
+               executionData: JSON.stringify(executionData),
+               state: 'INPROGRESS'
+           })
+       }
+
        // Get last state
        const lastState = executionData[executionData.length - 1].data.state
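To make the retry path above concrete, here is a hedged sketch of the execution-data trimming, with the item shape reduced to the fields the code reads (`nodeId`, `nodeLabel`, `status`, `data`); the real `IAgentflowExecutedData` carries more, and the node ids below are invented for the example.

    // Illustrative only: an execution that stopped at a human-input node and then errored.
    type ExecutedItem = { nodeId: string; nodeLabel: string; status: string; data?: { error?: string } }

    let executionData: ExecutedItem[] = [
        { nodeId: 'startAgentflow_0', nodeLabel: 'Start', status: 'FINISHED' },
        { nodeId: 'humanInputAgentflow_0', nodeLabel: 'Human Input', status: 'STOPPED' },
        { nodeId: 'agentAgentflow_0', nodeLabel: 'Agent', status: 'ERROR', data: { error: 'Rate limit' } }
    ]

    // ERROR directly after STOPPED: drop the error item so the flow can resume and retry.
    const last = executionData[executionData.length - 1]
    const secondLast = executionData[executionData.length - 2]
    if (last.status === 'ERROR' && secondLast?.status === 'STOPPED') {
        executionData = executionData.slice(0, -1)
    }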
@@ -1454,6 +1511,9 @@ export const executeAgentFlow = async ({
        })
        newExecution = previousExecution
        parentExecutionId = previousExecution.id
+
+       // Update humanInput with the resolved startNodeId
+       humanInput.startNodeId = startNodeId
    } else if (isRecursive && parentExecutionId) {
        const { startingNodeIds: startingNodeIdsFromFlow } = getStartingNode(nodeDependencies)
        startingNodeIds.push(...startingNodeIdsFromFlow)
|
||||||
parentExecutionId,
|
parentExecutionId,
|
||||||
isInternal,
|
isInternal,
|
||||||
pastChatHistory,
|
pastChatHistory,
|
||||||
|
prependedChatHistory,
|
||||||
appDataSource,
|
appDataSource,
|
||||||
usageCacheManager,
|
usageCacheManager,
|
||||||
telemetry,
|
telemetry,
|
||||||
|
|
|
||||||
|
|
@@ -1103,12 +1103,13 @@ export const replaceInputsWithConfig = (
 * Several conditions:
 * 1. If config is 'analytics', always allow it
 * 2. If config is 'vars', check its object and filter out the variables that are not enabled for override
- * 3. If typeof config's value is an object, check if the node id is in the overrideConfig object and if the parameter (systemMessagePrompt) is enabled
+ * 3. If typeof config's value is an array, check if the parameter is enabled and apply directly
+ * 4. If typeof config's value is an object, check if the node id is in the overrideConfig object and if the parameter (systemMessagePrompt) is enabled
 * Example:
 * "systemMessagePrompt": {
 *   "chatPromptTemplate_0": "You are an assistant"
 * }
- * 4. If typeof config's value is a string, check if the parameter is enabled
+ * 5. If typeof config's value is a string, check if the parameter is enabled
 * Example:
 * "systemMessagePrompt": "You are an assistant"
 */
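As a hedged illustration of the conditions enumerated above (array applied directly, object keyed by node id, plain value), the parameter names and node id below are placeholders and must be enabled for override on the flow.

    // Illustrative overrideConfig only
    const overrideConfig = {
        vars: { user_name: 'Alice' },                     // condition 2: filtered per enabled variable
        stopSequence: ['###', 'Observation:'],            // condition 3: array applied directly to the parameter
        systemMessagePrompt: {
            chatPromptTemplate_0: 'You are an assistant'  // condition 4: value scoped to one node id
        },
        promptPrefix: 'Answer concisely.'                 // condition 5: plain string applied if enabled
    }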
@@ -1129,6 +1130,12 @@ export const replaceInputsWithConfig = (
                }
                overrideConfig[config] = filteredVars
            }
+       } else if (Array.isArray(overrideConfig[config])) {
+           // Handle arrays as direct parameter values
+           if (isParameterEnabled(flowNodeData.label, config)) {
+               inputsObj[config] = overrideConfig[config]
+           }
+           continue
        } else if (overrideConfig[config] && typeof overrideConfig[config] === 'object') {
            const nodeIds = Object.keys(overrideConfig[config])
            if (nodeIds.includes(flowNodeData.id)) {
@@ -1352,6 +1359,48 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component
                    schema: arraySchema
                }
            }
+       } else if (inputParam.loadConfig) {
+           const configData = flowNode?.data?.inputs?.[`${inputParam.name}Config`]
+           if (configData) {
+               // Parse config data to extract schema
+               let parsedConfig: any = {}
+               try {
+                   parsedConfig = typeof configData === 'string' ? JSON.parse(configData) : configData
+               } catch (e) {
+                   // If parsing fails, treat as empty object
+                   parsedConfig = {}
+               }
+
+               // Generate schema from config structure
+               const configSchema: Record<string, string> = {}
+               parsedConfig = _removeCredentialId(parsedConfig)
+               for (const key in parsedConfig) {
+                   if (key === inputParam.name) continue
+                   const value = parsedConfig[key]
+                   let fieldType = 'string' // default type
+
+                   if (typeof value === 'boolean') {
+                       fieldType = 'boolean'
+                   } else if (typeof value === 'number') {
+                       fieldType = 'number'
+                   } else if (Array.isArray(value)) {
+                       fieldType = 'array'
+                   } else if (typeof value === 'object' && value !== null) {
+                       fieldType = 'object'
+                   }
+
+                   configSchema[key] = fieldType
+               }
+
+               obj = {
+                   node: flowNode.data.label,
+                   nodeId: flowNode.data.id,
+                   label: `${inputParam.label} Config`,
+                   name: `${inputParam.name}Config`,
+                   type: `json`,
+                   schema: configSchema
+               }
+           }
        } else {
            obj = {
                node: flowNode.data.label,
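As a hedged illustration of what the `loadConfig` branch above produces, the stored config below is a made-up value for an input named `llmModel`; the comments show the field types the loop would infer, and the resulting record is the `Record<string, string>` schema that the override-config tables render.

    // Hypothetical stored config (credential id already stripped by _removeCredentialId):
    const parsedConfig = {
        llmModel: 'chatOpenAI',          // skipped: key matches inputParam.name
        temperature: 0.7,                // -> 'number'
        streaming: true,                 // -> 'boolean'
        stopSequence: ['###'],           // -> 'array'
        modelKwargs: { top_p: 0.9 }      // -> 'object'
    }
    // Expected schema:
    // { temperature: 'number', streaming: 'boolean', stopSequence: 'array', modelKwargs: 'object' }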
@@ -1930,3 +1979,48 @@ export const getAllNodesInPath = (startNode: string, graph: INodeDirectedGraph):
    return Array.from(nodes)
 }
+
+export const _removeCredentialId = (obj: any): any => {
+    if (!obj || typeof obj !== 'object') return obj
+
+    if (Array.isArray(obj)) {
+        return obj.map((item) => _removeCredentialId(item))
+    }
+
+    const newObj: Record<string, any> = {}
+    for (const [key, value] of Object.entries(obj)) {
+        if (key === 'FLOWISE_CREDENTIAL_ID') continue
+        newObj[key] = _removeCredentialId(value)
+    }
+    return newObj
+}
+
+/**
+ * Validates that history items follow the expected schema
+ * @param {any[]} history - Array of history items to validate
+ * @returns {boolean} - True if all items are valid, false otherwise
+ */
+export const validateHistorySchema = (history: any[]): boolean => {
+    if (!Array.isArray(history)) {
+        return false
+    }
+
+    return history.every((item) => {
+        // Check if item is an object
+        if (typeof item !== 'object' || item === null) {
+            return false
+        }
+
+        // Check if role exists and is valid
+        if (typeof item.role !== 'string' || !['apiMessage', 'userMessage'].includes(item.role)) {
+            return false
+        }
+
+        // Check if content exists and is a string
+        if (typeof item.content !== 'string') {
+            return false
+        }
+
+        return true
+    })
+}
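A brief usage sketch of the validator above, assuming `validateHistorySchema` is imported from the server utils module shown in the diff (exact import path omitted); the inputs are illustrative.

    validateHistorySchema([
        { role: 'userMessage', content: 'Hello' },
        { role: 'apiMessage', content: 'Hi there!' }
    ]) // true

    validateHistorySchema([{ role: 'assistant', content: 'Hi' }]) // false - role must be 'apiMessage' or 'userMessage'
    validateHistorySchema([{ role: 'userMessage' }]) // false - content must be a string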
@@ -48,7 +48,10 @@ const OverrideConfigTable = ({ columns, onToggle, rows, sx }) => {
            return <SwitchInput onChange={(enabled) => handleChange(enabled, row)} value={row.enabled} />
        } else if (key === 'type' && row.schema) {
            // If there's schema information, add a tooltip
-           const schemaContent =
+           let schemaContent
+           if (Array.isArray(row.schema)) {
+               // Handle array format: [{ name: "field", type: "string" }, ...]
+               schemaContent =
                '[<br>' +
                row.schema
                    .map(
@@ -63,6 +66,12 @@ const OverrideConfigTable = ({ columns, onToggle, rows, sx }) => {
                    )
                    .join(',<br>') +
                '<br>]'
+           } else if (typeof row.schema === 'object' && row.schema !== null) {
+               // Handle object format: { "field": "string", "field2": "number", ... }
+               schemaContent = JSON.stringify(row.schema, null, 2).replace(/\n/g, '<br>').replace(/ /g, '&nbsp;')
+           } else {
+               schemaContent = 'No schema available'
+           }

            return (
                <Stack direction='row' alignItems='center' spacing={1}>
@@ -11,7 +11,10 @@ export const TableViewOnly = ({ columns, rows, sx }) => {
            return row[key] ? <Chip label='Enabled' color='primary' /> : <Chip label='Disabled' />
        } else if (key === 'type' && row.schema) {
            // If there's schema information, add a tooltip
-           const schemaContent =
+           let schemaContent
+           if (Array.isArray(row.schema)) {
+               // Handle array format: [{ name: "field", type: "string" }, ...]
+               schemaContent =
                '[<br>' +
                row.schema
                    .map(
@@ -26,6 +29,12 @@ export const TableViewOnly = ({ columns, rows, sx }) => {
                    )
                    .join(',<br>') +
                '<br>]'
+           } else if (typeof row.schema === 'object' && row.schema !== null) {
+               // Handle object format: { "field": "string", "field2": "number", ... }
+               schemaContent = JSON.stringify(row.schema, null, 2).replace(/\n/g, '<br>').replace(/ /g, '&nbsp;')
+           } else {
+               schemaContent = 'No schema available'
+           }

            return (
                <Stack direction='row' alignItems='center' spacing={1}>