Bugfix/Escape JSON in Prompt Message (#3901)
add fix to only get variables when there is no colon
This commit is contained in:
parent 4c9d46d7e5
commit 4aa97b0c9a
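The diff below threads a new `transformBracesWithColon` utility through every node that feeds user-authored text into a LangChain prompt template, and teaches `getInputVariables` to skip brace segments that contain a colon. A minimal sketch of the intended behavior (the example strings are illustrative, not taken from the diff):

// transformBracesWithColon is the helper added further down in this diff.
// A system message that embeds literal JSON would otherwise make the prompt
// template try to interpolate the braces as an input variable (or fail to parse).
const systemMessage = 'Reply as JSON, e.g. { "answer": "hello" }. Question: {input}'

transformBracesWithColon(systemMessage)
// => 'Reply as JSON, e.g. {{ "answer": "hello" }}. Question: {input}'
// Braces whose content contains a colon are doubled (escaped for the template
// engine); `{input}` has no colon, so it is kept as a real prompt variable.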
@@ -7,7 +7,7 @@ import { AgentStep } from '@langchain/core/agents'
 import { renderTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { RunnableSequence } from '@langchain/core/runnables'
 import { ChatConversationalAgent } from 'langchain/agents'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
 import {
     IVisionChatModal,
@@ -218,7 +218,7 @@ const prepareAgent = async (
     let tools = nodeData.inputs?.tools as Tool[]
     tools = flatten(tools)
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const systemMessage = nodeData.inputs?.systemMessage as string
+    let systemMessage = nodeData.inputs?.systemMessage as string
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
     const inputKey = memory.inputKey ? memory.inputKey : 'input'
     const prependMessages = options?.prependMessages
@@ -228,6 +228,8 @@ const prepareAgent = async (
         toolNames: tools.map((tool) => tool.name)
     })

+    systemMessage = transformBracesWithColon(systemMessage)
+
     const prompt = ChatConversationalAgent.createPrompt(tools, {
         systemMessage: systemMessage ? systemMessage : DEFAULT_PREFIX,
         outputParser

@@ -5,7 +5,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
 import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
 import {
     FlowiseMemory,
@@ -212,13 +212,15 @@ const prepareAgent = async (
     const model = nodeData.inputs?.model as BaseChatModel
     const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const systemMessage = nodeData.inputs?.systemMessage as string
+    let systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
     const inputKey = memory.inputKey ? memory.inputKey : 'input'
     const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever

+    systemMessage = transformBracesWithColon(systemMessage)
+
     const prompt = ChatPromptTemplate.fromMessages([
         ['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],
         new MessagesPlaceholder(memoryKey),

@@ -7,7 +7,13 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
 import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
-import { extractOutputFromArray, getBaseClasses, handleEscapeCharacters, removeInvalidImageMarkdown } from '../../../src/utils'
+import {
+    extractOutputFromArray,
+    getBaseClasses,
+    handleEscapeCharacters,
+    removeInvalidImageMarkdown,
+    transformBracesWithColon
+} from '../../../src/utils'
 import {
     FlowiseMemory,
     ICommonObject,
@@ -236,13 +242,15 @@ const prepareAgent = async (
     const model = nodeData.inputs?.model as BaseChatModel
     const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const systemMessage = nodeData.inputs?.systemMessage as string
+    let systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
     const inputKey = memory.inputKey ? memory.inputKey : 'input'
     const prependMessages = options?.prependMessages

+    systemMessage = transformBracesWithColon(systemMessage)
+
     let prompt = ChatPromptTemplate.fromMessages([
         ['system', systemMessage],
         new MessagesPlaceholder(memoryKey),

@@ -6,7 +6,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
 import { Tool } from '@langchain/core/tools'
 import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
 import { formatLogToMessage } from 'langchain/agents/format_scratchpad/log_to_message'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import {
     FlowiseMemory,
     ICommonObject,
@@ -222,13 +222,15 @@ const prepareAgent = async (
     const model = nodeData.inputs?.model as BaseChatModel
     const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const systemMessage = nodeData.inputs?.systemMessage as string
+    let systemMessage = nodeData.inputs?.systemMessage as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const inputKey = memory.inputKey ? memory.inputKey : 'input'
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
     const prependMessages = options?.prependMessages

+    systemMessage = transformBracesWithColon(systemMessage)
+
     let promptMessage = systemMessage ? systemMessage : defaultSystemMessage
     if (memory.memoryKey) promptMessage = promptMessage.replaceAll('{chat_history}', `{${memory.memoryKey}}`)
     if (memory.inputKey) promptMessage = promptMessage.replaceAll('{input}', `{${memory.inputKey}}`)

@@ -27,7 +27,7 @@ import {
     IServerSideEventStreamer
 } from '../../../src/Interface'
 import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
-import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
+import { getBaseClasses, handleEscapeCharacters, transformBracesWithColon } from '../../../src/utils'

 let systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`
 const inputKey = 'input'
@@ -170,7 +170,8 @@ class ConversationChain_Chains implements INode {

 const prepareChatPrompt = (nodeData: INodeData, humanImageMessages: MessageContentImageUrl[]) => {
     const memory = nodeData.inputs?.memory as FlowiseMemory
-    const prompt = nodeData.inputs?.systemMessagePrompt as string
+    let prompt = nodeData.inputs?.systemMessagePrompt as string
+    prompt = transformBracesWithColon(prompt)
     const chatPromptTemplate = nodeData.inputs?.chatPromptTemplate as ChatPromptTemplate
     let model = nodeData.inputs?.model as BaseChatModel

@@ -6,7 +6,7 @@ import { SqlDatabaseChain, SqlDatabaseChainInput, DEFAULT_SQL_DATABASE_PROMPT }
 import { SqlDatabase } from 'langchain/sql_db'
 import { ICommonObject, INode, INodeData, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
 import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
-import { getBaseClasses, getInputVariables } from '../../../src/utils'
+import { getBaseClasses, getInputVariables, transformBracesWithColon } from '../../../src/utils'
 import { checkInputs, Moderation, streamResponse } from '../../moderation/Moderation'
 import { formatResponse } from '../../outputparsers/OutputParserHelpers'

@@ -247,6 +247,7 @@ const getSQLDBChain = async (
     }

     if (customPrompt) {
+        customPrompt = transformBracesWithColon(customPrompt)
         const options: PromptTemplateInput = {
             template: customPrompt,
             inputVariables: getInputVariables(customPrompt)

@@ -1,5 +1,5 @@
 import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } from '@langchain/core/prompts'
 import { getVM } from '../../sequentialagents/commonUtils'
 import { DataSource } from 'typeorm'
@@ -98,14 +98,17 @@ class ChatPromptTemplate_Prompts implements INode {
     }

     async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
-        const systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string
-        const humanMessagePrompt = nodeData.inputs?.humanMessagePrompt as string
+        let systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string
+        let humanMessagePrompt = nodeData.inputs?.humanMessagePrompt as string
         const promptValuesStr = nodeData.inputs?.promptValues
         const tabIdentifier = nodeData.inputs?.[`${TAB_IDENTIFIER}_${nodeData.id}`] as string
         const selectedTab = tabIdentifier ? tabIdentifier.split(`_${nodeData.id}`)[0] : 'messageHistoryCode'
         const messageHistoryCode = nodeData.inputs?.messageHistoryCode
         const messageHistory = nodeData.inputs?.messageHistory

+        systemMessagePrompt = transformBracesWithColon(systemMessagePrompt)
+        humanMessagePrompt = transformBracesWithColon(humanMessagePrompt)
+
         let prompt = ChatPromptTemplate.fromMessages([
             SystemMessagePromptTemplate.fromTemplate(systemMessagePrompt),
             HumanMessagePromptTemplate.fromTemplate(humanMessagePrompt)

@@ -1,5 +1,5 @@
 import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, getInputVariables } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam, getInputVariables, transformBracesWithColon } from '../../../src/utils'
 import { PromptTemplateInput } from '@langchain/core/prompts'
 import { Langfuse } from 'langfuse'

@@ -64,7 +64,7 @@ class PromptLangfuse_Prompts implements INode {
         })

         const langfusePrompt = await langfuse.getPrompt(nodeData.inputs?.template as string)
-        const template = langfusePrompt.getLangchainPrompt()
+        let template = langfusePrompt.getLangchainPrompt()

         const promptValuesStr = nodeData.inputs?.promptValues

@@ -78,6 +78,7 @@ class PromptLangfuse_Prompts implements INode {
         }

         const inputVariables = getInputVariables(template)
+        template = transformBracesWithColon(template)

         try {
             const options: PromptTemplateInput = {

@@ -1,5 +1,5 @@
 import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
-import { getBaseClasses, getInputVariables } from '../../../src/utils'
+import { getBaseClasses, getInputVariables, transformBracesWithColon } from '../../../src/utils'
 import { PromptTemplateInput } from '@langchain/core/prompts'

 class PromptTemplate_Prompts implements INode {
@@ -42,7 +42,7 @@ class PromptTemplate_Prompts implements INode {
     }

     async init(nodeData: INodeData): Promise<any> {
-        const template = nodeData.inputs?.template as string
+        let template = nodeData.inputs?.template as string
         const promptValuesStr = nodeData.inputs?.promptValues

         let promptValues: ICommonObject = {}
@@ -55,6 +55,7 @@ class PromptTemplate_Prompts implements INode {
         }

         const inputVariables = getInputVariables(template)
+        template = transformBracesWithColon(template)

         try {
             const options: PromptTemplateInput = {

@@ -1,3 +1,4 @@
+import { transformBracesWithColon } from '../../../src'
 import { INode, INodeData, INodeParams, PromptRetriever, PromptRetrieverInput } from '../../../src/Interface'

 class PromptRetriever_Retrievers implements INode {
@@ -48,7 +49,8 @@ class PromptRetriever_Retrievers implements INode {
     async init(nodeData: INodeData): Promise<any> {
         const name = nodeData.inputs?.name as string
         const description = nodeData.inputs?.description as string
-        const systemMessage = nodeData.inputs?.systemMessage as string
+        let systemMessage = nodeData.inputs?.systemMessage as string
+        systemMessage = transformBracesWithColon(systemMessage)

         const obj = {
             name,

@@ -29,7 +29,8 @@ import {
     getVars,
     handleEscapeCharacters,
     prepareSandboxVars,
-    removeInvalidImageMarkdown
+    removeInvalidImageMarkdown,
+    transformBracesWithColon
 } from '../../../src/utils'
 import {
     customGet,
@@ -456,7 +457,9 @@ class Agent_SeqAgents implements INode {
         let tools = nodeData.inputs?.tools
         tools = flatten(tools)
         let agentSystemPrompt = nodeData.inputs?.systemMessagePrompt as string
+        agentSystemPrompt = transformBracesWithColon(agentSystemPrompt)
         let agentHumanPrompt = nodeData.inputs?.humanMessagePrompt as string
+        agentHumanPrompt = transformBracesWithColon(agentHumanPrompt)
         const agentLabel = nodeData.inputs?.agentName as string
         const sequentialNodes = nodeData.inputs?.sequentialNode as ISeqAgentNode[]
         const maxIterations = nodeData.inputs?.maxIterations as string

@@ -16,7 +16,7 @@ import {
     ISeqAgentNode,
     ISeqAgentsState
 } from '../../../src/Interface'
-import { getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils'
+import { getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars, transformBracesWithColon } from '../../../src/utils'
 import {
     ExtractTool,
     checkCondition,
@@ -388,7 +388,9 @@ class ConditionAgent_SeqAgents implements INode {
         const output = nodeData.outputs?.output as string
         const sequentialNodes = nodeData.inputs?.sequentialNode as ISeqAgentNode[]
         let agentPrompt = nodeData.inputs?.systemMessagePrompt as string
+        agentPrompt = transformBracesWithColon(agentPrompt)
         let humanPrompt = nodeData.inputs?.humanMessagePrompt as string
+        humanPrompt = transformBracesWithColon(humanPrompt)
         const promptValuesStr = nodeData.inputs?.promptValues
         const conditionAgentStructuredOutput = nodeData.inputs?.conditionAgentStructuredOutput
         const model = nodeData.inputs?.model as BaseChatModel

@@ -18,7 +18,14 @@ import {
     ConversationHistorySelection
 } from '../../../src/Interface'
 import { AgentExecutor } from '../../../src/agents'
-import { extractOutputFromArray, getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils'
+import {
+    extractOutputFromArray,
+    getInputVariables,
+    getVars,
+    handleEscapeCharacters,
+    prepareSandboxVars,
+    transformBracesWithColon
+} from '../../../src/utils'
 import {
     ExtractTool,
     convertStructuredSchemaToZod,
@@ -388,7 +395,9 @@ class LLMNode_SeqAgents implements INode {
         tools = flatten(tools)

         let systemPrompt = nodeData.inputs?.systemMessagePrompt as string
+        systemPrompt = transformBracesWithColon(systemPrompt)
         let humanPrompt = nodeData.inputs?.humanMessagePrompt as string
+        humanPrompt = transformBracesWithColon(humanPrompt)
         const llmNodeLabel = nodeData.inputs?.llmNodeName as string
         const sequentialNodes = nodeData.inputs?.sequentialNode as ISeqAgentNode[]
         const model = nodeData.inputs?.model as BaseChatModel

@@ -271,7 +271,7 @@ export const getInputVariables = (paramValue: string): string[] => {
             const variableStartIdx = variableStack[variableStack.length - 1].startIdx
             const variableEndIdx = startIdx
             const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx)
-            inputVariables.push(variableFullPath)
+            if (!variableFullPath.includes(':')) inputVariables.push(variableFullPath)
             variableStack.pop()
         }
         startIdx += 1
@@ -279,6 +279,31 @@ export const getInputVariables = (paramValue: string): string[] => {
     return inputVariables
 }

+/**
+ * Transform curly braces into double curly braces if the content includes a colon.
+ * @param input - The original string that may contain { ... } segments.
+ * @returns The transformed string, where { ... } containing a colon has been replaced with {{ ... }}.
+ */
+export const transformBracesWithColon = (input: string): string => {
+    // This regex will match anything of the form `{ ... }` (no nested braces).
+    // `[^{}]*` means: match any characters that are not `{` or `}` zero or more times.
+    const regex = /\{([^{}]*?)\}/g
+
+    return input.replace(regex, (match, groupContent) => {
+        // groupContent is the text inside the braces `{ ... }`.
+
+        if (groupContent.includes(':')) {
+            // If there's a colon in the content, we turn { ... } into {{ ... }}
+            // The match is the full string like: "{ answer: hello }"
+            // groupContent is the inner part like: " answer: hello "
+            return `{{${groupContent}}}`
+        } else {
+            // Otherwise, leave it as is
+            return match
+        }
+    })
+}
+
 /**
  * Crawl all available urls given a domain url and limit
  * @param {string} url

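For context, here is how the updated server-side `getInputVariables` and the new helper behave on the same string (illustrative values only, not part of the commit):

const template = 'Answer {question} and respond as { "answer": "..." }'

getInputVariables(template)
// => ['question']  — the colon-containing segment is no longer collected as a variable

transformBracesWithColon(template)
// => 'Answer {question} and respond as {{ "answer": "..." }}'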
@@ -569,32 +569,24 @@ export const generateRandomGradient = () => {
     return gradient
 }

-export const getInputVariables = (paramValue) => {
-    let returnVal = paramValue
-    const variableStack = []
-    const inputVariables = []
-    let startIdx = 0
-    const endIdx = returnVal.length
+export const getInputVariables = (input) => {
+    // This regex will match single curly-braced substrings
+    const pattern = /\{([^{}]+)\}/g
+    const results = []

-    while (startIdx < endIdx) {
-        const substr = returnVal.substring(startIdx, startIdx + 1)
+    let match

-        // Store the opening double curly bracket
-        if (substr === '{') {
-            variableStack.push({ substr, startIdx: startIdx + 1 })
+    while ((match = pattern.exec(input)) !== null) {
+        const inside = match[1].trim()
+
+        // Check if there's a colon
+        if (!inside.includes(':')) {
+            // If there's no colon, add to results
+            results.push(inside)
         }
-
-        // Found the complete variable
-        if (substr === '}' && variableStack.length > 0 && variableStack[variableStack.length - 1].substr === '{') {
-            const variableStartIdx = variableStack[variableStack.length - 1].startIdx
-            const variableEndIdx = startIdx
-            const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx)
-            inputVariables.push(variableFullPath)
-            variableStack.pop()
-        }
-        startIdx += 1
     }
-    return inputVariables
+
+    return results
 }

 export const removeDuplicateURL = (message) => {

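The rewritten UI-side `getInputVariables` above follows the same rule via a regex and additionally trims the captured name. An illustrative call (not part of the commit):

getInputVariables('Hi { name }, output JSON like { "answer": "hello" }')
// => ['name']  — trimmed, and the JSON-like segment is skipped because it contains a colon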
@@ -12,7 +12,16 @@ import useConfirm from '@/hooks/useConfirm'
 // Material-UI
 import { IconButton, Avatar, ButtonBase, Toolbar, Box, Button, Grid, OutlinedInput, Stack, Typography } from '@mui/material'
 import { useTheme } from '@mui/material/styles'
-import { IconCode, IconArrowLeft, IconDeviceFloppy, IconSettings, IconX, IconTrash, IconWand } from '@tabler/icons-react'
+import {
+    IconCode,
+    IconArrowLeft,
+    IconDeviceFloppy,
+    IconSettings,
+    IconX,
+    IconTrash,
+    IconWand,
+    IconArrowsMaximize
+} from '@tabler/icons-react'

 // Project import
 import MainCard from '@/ui-component/cards/MainCard'
@@ -30,6 +39,7 @@ import ViewLeadsDialog from '@/ui-component/dialog/ViewLeadsDialog'
 import Settings from '@/views/settings'
 import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog'
 import PromptGeneratorDialog from '@/ui-component/dialog/PromptGeneratorDialog'
+import ExpandTextDialog from '@/ui-component/dialog/ExpandTextDialog'

 // API
 import assistantsApi from '@/api/assistants'
@@ -101,6 +111,8 @@ const CustomAssistantConfigurePreview = () => {
     const [isSettingsOpen, setSettingsOpen] = useState(false)
     const [assistantPromptGeneratorDialogOpen, setAssistantPromptGeneratorDialogOpen] = useState(false)
     const [assistantPromptGeneratorDialogProps, setAssistantPromptGeneratorDialogProps] = useState({})
+    const [showExpandDialog, setShowExpandDialog] = useState(false)
+    const [expandDialogProps, setExpandDialogProps] = useState({})

     const [loading, setLoading] = useState(false)
     const [loadingAssistant, setLoadingAssistant] = useState(true)
@@ -525,6 +537,21 @@ const CustomAssistantConfigurePreview = () => {
         }
     }

+    const onExpandDialogClicked = (value) => {
+        const dialogProps = {
+            value,
+            inputParam: {
+                label: 'Instructions',
+                name: 'instructions',
+                type: 'string'
+            },
+            confirmButtonName: 'Save',
+            cancelButtonName: 'Cancel'
+        }
+        setExpandDialogProps(dialogProps)
+        setShowExpandDialog(true)
+    }
+
     const generateDocStoreToolDesc = async (storeId) => {
         const isValid = checkInputParamsMandatory()
         if (!isValid) {
@@ -955,6 +982,18 @@ const CustomAssistantConfigurePreview = () => {
                 Instructions<span style={{ color: 'red' }}> *</span>
             </Typography>
             <div style={{ flex: 1 }}></div>
+            <IconButton
+                size='small'
+                sx={{
+                    height: 25,
+                    width: 25
+                }}
+                title='Expand'
+                color='secondary'
+                onClick={() => onExpandDialogClicked(customAssistantInstruction)}
+            >
+                <IconArrowsMaximize />
+            </IconButton>
             {selectedChatModel?.name && (
                 <Button
                     title='Generate instructions using model'
@@ -1329,6 +1368,15 @@ const CustomAssistantConfigurePreview = () => {
                 setAssistantPromptGeneratorDialogOpen(false)
             }}
         />
+        <ExpandTextDialog
+            show={showExpandDialog}
+            dialogProps={expandDialogProps}
+            onCancel={() => setShowExpandDialog(false)}
+            onConfirm={(newValue) => {
+                setCustomAssistantInstruction(newValue)
+                setShowExpandDialog(false)
+            }}
+        ></ExpandTextDialog>
         <ConfirmDialog />
     </>
 )