Bugfix/Escape JSON in Prompt Message (#3901)
add fix to only get variables when there is no colon
This commit is contained in:
parent 4c9d46d7e5
commit 4aa97b0c9a

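Background for the fix: a prompt that embeds literal JSON such as {"answer": "..."} uses single braces with a colon inside, which LangChain's templates would otherwise parse as an input variable and fail to render. The new transformBracesWithColon helper escapes any single-braced segment containing a colon into double braces before the prompt reaches LangChain, and getInputVariables now skips such segments. A minimal sketch of the intended behaviour, using an illustrative prompt that is not taken from the diff:

    // Illustrative prompt: embeds JSON output instructions plus a real {context} variable
    const systemMessage = 'Return JSON like {"answer": "..."} based on the {context} provided.'

    getInputVariables(systemMessage)
    // => ['context']; the colon-bearing segment is not reported as a variable

    transformBracesWithColon(systemMessage)
    // => 'Return JSON like {{"answer": "..."}} based on the {context} provided.'
    // Double braces render as literal braces in LangChain templates, while {context} stays a variable.
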
@@ -7,7 +7,7 @@ import { AgentStep } from '@langchain/core/agents'
 import { renderTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { RunnableSequence } from '@langchain/core/runnables'
 import { ChatConversationalAgent } from 'langchain/agents'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
 import {
     IVisionChatModal,
@@ -218,7 +218,7 @@ const prepareAgent = async (
     let tools = nodeData.inputs?.tools as Tool[]
     tools = flatten(tools)
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const systemMessage = nodeData.inputs?.systemMessage as string
+    let systemMessage = nodeData.inputs?.systemMessage as string
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
     const inputKey = memory.inputKey ? memory.inputKey : 'input'
     const prependMessages = options?.prependMessages
@@ -228,6 +228,8 @@ const prepareAgent = async (
         toolNames: tools.map((tool) => tool.name)
     })

+    systemMessage = transformBracesWithColon(systemMessage)
+
     const prompt = ChatConversationalAgent.createPrompt(tools, {
         systemMessage: systemMessage ? systemMessage : DEFAULT_PREFIX,
         outputParser

@@ -5,7 +5,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
 import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
 import {
     FlowiseMemory,
@@ -212,13 +212,15 @@ const prepareAgent = async (
     const model = nodeData.inputs?.model as BaseChatModel
     const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const systemMessage = nodeData.inputs?.systemMessage as string
+    let systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
     const inputKey = memory.inputKey ? memory.inputKey : 'input'
     const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever

+    systemMessage = transformBracesWithColon(systemMessage)
+
     const prompt = ChatPromptTemplate.fromMessages([
         ['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],
         new MessagesPlaceholder(memoryKey),

@@ -7,7 +7,13 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
 import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
-import { extractOutputFromArray, getBaseClasses, handleEscapeCharacters, removeInvalidImageMarkdown } from '../../../src/utils'
+import {
+    extractOutputFromArray,
+    getBaseClasses,
+    handleEscapeCharacters,
+    removeInvalidImageMarkdown,
+    transformBracesWithColon
+} from '../../../src/utils'
 import {
     FlowiseMemory,
     ICommonObject,
@@ -236,13 +242,15 @@ const prepareAgent = async (
     const model = nodeData.inputs?.model as BaseChatModel
     const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const systemMessage = nodeData.inputs?.systemMessage as string
+    let systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
     const inputKey = memory.inputKey ? memory.inputKey : 'input'
     const prependMessages = options?.prependMessages

+    systemMessage = transformBracesWithColon(systemMessage)
+
     let prompt = ChatPromptTemplate.fromMessages([
         ['system', systemMessage],
         new MessagesPlaceholder(memoryKey),

@@ -6,7 +6,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
 import { Tool } from '@langchain/core/tools'
 import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
 import { formatLogToMessage } from 'langchain/agents/format_scratchpad/log_to_message'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import {
     FlowiseMemory,
     ICommonObject,
@@ -222,13 +222,15 @@ const prepareAgent = async (
     const model = nodeData.inputs?.model as BaseChatModel
     const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const systemMessage = nodeData.inputs?.systemMessage as string
+    let systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const inputKey = memory.inputKey ? memory.inputKey : 'input'
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
     const prependMessages = options?.prependMessages

+    systemMessage = transformBracesWithColon(systemMessage)
+
     let promptMessage = systemMessage ? systemMessage : defaultSystemMessage
     if (memory.memoryKey) promptMessage = promptMessage.replaceAll('{chat_history}', `{${memory.memoryKey}}`)
     if (memory.inputKey) promptMessage = promptMessage.replaceAll('{input}', `{${memory.inputKey}}`)

@@ -27,7 +27,7 @@ import {
     IServerSideEventStreamer
 } from '../../../src/Interface'
 import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
-import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
+import { getBaseClasses, handleEscapeCharacters, transformBracesWithColon } from '../../../src/utils'

 let systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`
 const inputKey = 'input'
@@ -170,7 +170,8 @@ class ConversationChain_Chains implements INode {

 const prepareChatPrompt = (nodeData: INodeData, humanImageMessages: MessageContentImageUrl[]) => {
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const prompt = nodeData.inputs?.systemMessagePrompt as string
+    let prompt = nodeData.inputs?.systemMessagePrompt as string
+    prompt = transformBracesWithColon(prompt)
     const chatPromptTemplate = nodeData.inputs?.chatPromptTemplate as ChatPromptTemplate
     let model = nodeData.inputs?.model as BaseChatModel

@@ -6,7 +6,7 @@ import { SqlDatabaseChain, SqlDatabaseChainInput, DEFAULT_SQL_DATABASE_PROMPT }
 import { SqlDatabase } from 'langchain/sql_db'
 import { ICommonObject, INode, INodeData, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
 import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
-import { getBaseClasses, getInputVariables } from '../../../src/utils'
+import { getBaseClasses, getInputVariables, transformBracesWithColon } from '../../../src/utils'
 import { checkInputs, Moderation, streamResponse } from '../../moderation/Moderation'
 import { formatResponse } from '../../outputparsers/OutputParserHelpers'

@@ -247,6 +247,7 @@ const getSQLDBChain = async (
     }

     if (customPrompt) {
+        customPrompt = transformBracesWithColon(customPrompt)
         const options: PromptTemplateInput = {
             template: customPrompt,
             inputVariables: getInputVariables(customPrompt)

@@ -1,5 +1,5 @@
 import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } from '@langchain/core/prompts'
 import { getVM } from '../../sequentialagents/commonUtils'
 import { DataSource } from 'typeorm'
@@ -98,14 +98,17 @@ class ChatPromptTemplate_Prompts implements INode {
     }

     async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
-        const systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string
-        const humanMessagePrompt = nodeData.inputs?.humanMessagePrompt as string
+        let systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string
+        let humanMessagePrompt = nodeData.inputs?.humanMessagePrompt as string
         const promptValuesStr = nodeData.inputs?.promptValues
         const tabIdentifier = nodeData.inputs?.[`${TAB_IDENTIFIER}_${nodeData.id}`] as string
         const selectedTab = tabIdentifier ? tabIdentifier.split(`_${nodeData.id}`)[0] : 'messageHistoryCode'
         const messageHistoryCode = nodeData.inputs?.messageHistoryCode
         const messageHistory = nodeData.inputs?.messageHistory

+        systemMessagePrompt = transformBracesWithColon(systemMessagePrompt)
+        humanMessagePrompt = transformBracesWithColon(humanMessagePrompt)
+
         let prompt = ChatPromptTemplate.fromMessages([
             SystemMessagePromptTemplate.fromTemplate(systemMessagePrompt),
             HumanMessagePromptTemplate.fromTemplate(humanMessagePrompt)

@@ -1,5 +1,5 @@
 import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, getInputVariables } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam, getInputVariables, transformBracesWithColon } from '../../../src/utils'
 import { PromptTemplateInput } from '@langchain/core/prompts'
 import { Langfuse } from 'langfuse'

@@ -64,7 +64,7 @@ class PromptLangfuse_Prompts implements INode {
         })

         const langfusePrompt = await langfuse.getPrompt(nodeData.inputs?.template as string)
-        const template = langfusePrompt.getLangchainPrompt()
+        let template = langfusePrompt.getLangchainPrompt()

         const promptValuesStr = nodeData.inputs?.promptValues

@@ -78,6 +78,7 @@ class PromptLangfuse_Prompts implements INode {
         }

         const inputVariables = getInputVariables(template)
+        template = transformBracesWithColon(template)

         try {
             const options: PromptTemplateInput = {

@@ -1,5 +1,5 @@
 import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
-import { getBaseClasses, getInputVariables } from '../../../src/utils'
+import { getBaseClasses, getInputVariables, transformBracesWithColon } from '../../../src/utils'
 import { PromptTemplateInput } from '@langchain/core/prompts'

 class PromptTemplate_Prompts implements INode {
@@ -42,7 +42,7 @@ class PromptTemplate_Prompts implements INode {
     }

     async init(nodeData: INodeData): Promise<any> {
-        const template = nodeData.inputs?.template as string
+        let template = nodeData.inputs?.template as string
         const promptValuesStr = nodeData.inputs?.promptValues

         let promptValues: ICommonObject = {}
@@ -55,6 +55,7 @@ class PromptTemplate_Prompts implements INode {
         }

         const inputVariables = getInputVariables(template)
+        template = transformBracesWithColon(template)

         try {
             const options: PromptTemplateInput = {

@@ -1,3 +1,4 @@
+import { transformBracesWithColon } from '../../../src'
 import { INode, INodeData, INodeParams, PromptRetriever, PromptRetrieverInput } from '../../../src/Interface'

 class PromptRetriever_Retrievers implements INode {
@@ -48,7 +49,8 @@ class PromptRetriever_Retrievers implements INode {
     async init(nodeData: INodeData): Promise<any> {
         const name = nodeData.inputs?.name as string
         const description = nodeData.inputs?.description as string
-        const systemMessage = nodeData.inputs?.systemMessage as string
+        let systemMessage = nodeData.inputs?.systemMessage as string
+        systemMessage = transformBracesWithColon(systemMessage)

         const obj = {
             name,

@@ -29,7 +29,8 @@ import {
     getVars,
     handleEscapeCharacters,
     prepareSandboxVars,
-    removeInvalidImageMarkdown
+    removeInvalidImageMarkdown,
+    transformBracesWithColon
 } from '../../../src/utils'
 import {
     customGet,
@@ -456,7 +457,9 @@ class Agent_SeqAgents implements INode {
         let tools = nodeData.inputs?.tools
         tools = flatten(tools)
         let agentSystemPrompt = nodeData.inputs?.systemMessagePrompt as string
+        agentSystemPrompt = transformBracesWithColon(agentSystemPrompt)
         let agentHumanPrompt = nodeData.inputs?.humanMessagePrompt as string
+        agentHumanPrompt = transformBracesWithColon(agentHumanPrompt)
         const agentLabel = nodeData.inputs?.agentName as string
         const sequentialNodes = nodeData.inputs?.sequentialNode as ISeqAgentNode[]
         const maxIterations = nodeData.inputs?.maxIterations as string

@@ -16,7 +16,7 @@ import {
     ISeqAgentNode,
     ISeqAgentsState
 } from '../../../src/Interface'
-import { getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils'
+import { getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars, transformBracesWithColon } from '../../../src/utils'
 import {
     ExtractTool,
     checkCondition,
@@ -388,7 +388,9 @@ class ConditionAgent_SeqAgents implements INode {
         const output = nodeData.outputs?.output as string
         const sequentialNodes = nodeData.inputs?.sequentialNode as ISeqAgentNode[]
         let agentPrompt = nodeData.inputs?.systemMessagePrompt as string
+        agentPrompt = transformBracesWithColon(agentPrompt)
         let humanPrompt = nodeData.inputs?.humanMessagePrompt as string
+        humanPrompt = transformBracesWithColon(humanPrompt)
         const promptValuesStr = nodeData.inputs?.promptValues
         const conditionAgentStructuredOutput = nodeData.inputs?.conditionAgentStructuredOutput
         const model = nodeData.inputs?.model as BaseChatModel

@@ -18,7 +18,14 @@ import {
     ConversationHistorySelection
 } from '../../../src/Interface'
 import { AgentExecutor } from '../../../src/agents'
-import { extractOutputFromArray, getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils'
+import {
+    extractOutputFromArray,
+    getInputVariables,
+    getVars,
+    handleEscapeCharacters,
+    prepareSandboxVars,
+    transformBracesWithColon
+} from '../../../src/utils'
 import {
     ExtractTool,
     convertStructuredSchemaToZod,
@@ -388,7 +395,9 @@ class LLMNode_SeqAgents implements INode {
         tools = flatten(tools)

         let systemPrompt = nodeData.inputs?.systemMessagePrompt as string
+        systemPrompt = transformBracesWithColon(systemPrompt)
         let humanPrompt = nodeData.inputs?.humanMessagePrompt as string
+        humanPrompt = transformBracesWithColon(humanPrompt)
         const llmNodeLabel = nodeData.inputs?.llmNodeName as string
         const sequentialNodes = nodeData.inputs?.sequentialNode as ISeqAgentNode[]
         const model = nodeData.inputs?.model as BaseChatModel

@@ -271,7 +271,7 @@ export const getInputVariables = (paramValue: string): string[] => {
             const variableStartIdx = variableStack[variableStack.length - 1].startIdx
             const variableEndIdx = startIdx
             const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx)
-            inputVariables.push(variableFullPath)
+            if (!variableFullPath.includes(':')) inputVariables.push(variableFullPath)
             variableStack.pop()
         }
         startIdx += 1
@@ -279,6 +279,31 @@ export const getInputVariables = (paramValue: string): string[] => {
     return inputVariables
 }

+/**
+ * Transform curly braces into double curly braces if the content includes a colon.
+ * @param input - The original string that may contain { ... } segments.
+ * @returns The transformed string, where { ... } containing a colon has been replaced with {{ ... }}.
+ */
+export const transformBracesWithColon = (input: string): string => {
+    // This regex will match anything of the form `{ ... }` (no nested braces).
+    // `[^{}]*` means: match any characters that are not `{` or `}` zero or more times.
+    const regex = /\{([^{}]*?)\}/g
+
+    return input.replace(regex, (match, groupContent) => {
+        // groupContent is the text inside the braces `{ ... }`.
+
+        if (groupContent.includes(':')) {
+            // If there's a colon in the content, we turn { ... } into {{ ... }}
+            // The match is the full string like: "{ answer: hello }"
+            // groupContent is the inner part like: " answer: hello "
+            return `{{${groupContent}}}`
+        } else {
+            // Otherwise, leave it as is
+            return match
+        }
+    })
+}
+
 /**
  * Crawl all available urls given a domain url and limit
  * @param {string} url

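Taken together, the prompt nodes above now follow this order, shown here as a condensed sketch of the calls already present in the diff:

    const inputVariables = getInputVariables(template) // colon-bearing {...} segments are skipped
    template = transformBracesWithColon(template)      // {"answer": ...} becomes {{"answer": ...}} before PromptTemplate sees it
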
@@ -569,32 +569,24 @@ export const generateRandomGradient = () => {
     return gradient
 }

-export const getInputVariables = (paramValue) => {
-    let returnVal = paramValue
-    const variableStack = []
-    const inputVariables = []
-    let startIdx = 0
-    const endIdx = returnVal.length
-
-    while (startIdx < endIdx) {
-        const substr = returnVal.substring(startIdx, startIdx + 1)
-
-        // Store the opening double curly bracket
-        if (substr === '{') {
-            variableStack.push({ substr, startIdx: startIdx + 1 })
-        }
-
-        // Found the complete variable
-        if (substr === '}' && variableStack.length > 0 && variableStack[variableStack.length - 1].substr === '{') {
-            const variableStartIdx = variableStack[variableStack.length - 1].startIdx
-            const variableEndIdx = startIdx
-            const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx)
-            inputVariables.push(variableFullPath)
-            variableStack.pop()
-        }
-        startIdx += 1
-    }
-    return inputVariables
+export const getInputVariables = (input) => {
+    // This regex will match single curly-braced substrings
+    const pattern = /\{([^{}]+)\}/g
+    const results = []
+
+    let match
+
+    while ((match = pattern.exec(input)) !== null) {
+        const inside = match[1].trim()
+
+        // Check if there's a colon
+        if (!inside.includes(':')) {
+            // If there's no colon, add to results
+            results.push(inside)
+        }
+    }
+
+    return results
 }

 export const removeDuplicateURL = (message) => {

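A quick check of the rewritten UI helper, with an illustrative template string that is not from the diff:

    getInputVariables('Reply with {"answer": "yes"} about {topic}')
    // => ['topic']; the JSON-like segment containing a colon is no longer reported as an input variable
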
@@ -12,7 +12,16 @@ import useConfirm from '@/hooks/useConfirm'
 // Material-UI
 import { IconButton, Avatar, ButtonBase, Toolbar, Box, Button, Grid, OutlinedInput, Stack, Typography } from '@mui/material'
 import { useTheme } from '@mui/material/styles'
-import { IconCode, IconArrowLeft, IconDeviceFloppy, IconSettings, IconX, IconTrash, IconWand } from '@tabler/icons-react'
+import {
+    IconCode,
+    IconArrowLeft,
+    IconDeviceFloppy,
+    IconSettings,
+    IconX,
+    IconTrash,
+    IconWand,
+    IconArrowsMaximize
+} from '@tabler/icons-react'

 // Project import
 import MainCard from '@/ui-component/cards/MainCard'
@@ -30,6 +39,7 @@ import ViewLeadsDialog from '@/ui-component/dialog/ViewLeadsDialog'
 import Settings from '@/views/settings'
 import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog'
 import PromptGeneratorDialog from '@/ui-component/dialog/PromptGeneratorDialog'
+import ExpandTextDialog from '@/ui-component/dialog/ExpandTextDialog'

 // API
 import assistantsApi from '@/api/assistants'
@@ -101,6 +111,8 @@ const CustomAssistantConfigurePreview = () => {
     const [isSettingsOpen, setSettingsOpen] = useState(false)
     const [assistantPromptGeneratorDialogOpen, setAssistantPromptGeneratorDialogOpen] = useState(false)
     const [assistantPromptGeneratorDialogProps, setAssistantPromptGeneratorDialogProps] = useState({})
+    const [showExpandDialog, setShowExpandDialog] = useState(false)
+    const [expandDialogProps, setExpandDialogProps] = useState({})

     const [loading, setLoading] = useState(false)
     const [loadingAssistant, setLoadingAssistant] = useState(true)
@@ -525,6 +537,21 @@ const CustomAssistantConfigurePreview = () => {
         }
     }

+    const onExpandDialogClicked = (value) => {
+        const dialogProps = {
+            value,
+            inputParam: {
+                label: 'Instructions',
+                name: 'instructions',
+                type: 'string'
+            },
+            confirmButtonName: 'Save',
+            cancelButtonName: 'Cancel'
+        }
+        setExpandDialogProps(dialogProps)
+        setShowExpandDialog(true)
+    }
+
     const generateDocStoreToolDesc = async (storeId) => {
         const isValid = checkInputParamsMandatory()
         if (!isValid) {
@@ -955,6 +982,18 @@ const CustomAssistantConfigurePreview = () => {
                                     Instructions<span style={{ color: 'red' }}> *</span>
                                 </Typography>
                                 <div style={{ flex: 1 }}></div>
+                                <IconButton
+                                    size='small'
+                                    sx={{
+                                        height: 25,
+                                        width: 25
+                                    }}
+                                    title='Expand'
+                                    color='secondary'
+                                    onClick={() => onExpandDialogClicked(customAssistantInstruction)}
+                                >
+                                    <IconArrowsMaximize />
+                                </IconButton>
                                 {selectedChatModel?.name && (
                                     <Button
                                         title='Generate instructions using model'
@@ -1329,6 +1368,15 @@ const CustomAssistantConfigurePreview = () => {
                     setAssistantPromptGeneratorDialogOpen(false)
                 }}
             />
+            <ExpandTextDialog
+                show={showExpandDialog}
+                dialogProps={expandDialogProps}
+                onCancel={() => setShowExpandDialog(false)}
+                onConfirm={(newValue) => {
+                    setCustomAssistantInstruction(newValue)
+                    setShowExpandDialog(false)
+                }}
+            ></ExpandTextDialog>
             <ConfirmDialog />
         </>
     )