Flowise/packages/server/marketplaces/agentflowsv2/Slack Agent.json

{
"description": "An agent that can post message to Slack channel",
"usecases": ["Agent"],
"nodes": [
{
"id": "startAgentflow_0",
"type": "agentFlow",
"position": {
"x": -192.5,
"y": 68
},
"data": {
"id": "startAgentflow_0",
"label": "Start",
"version": 1,
"name": "startAgentflow",
"type": "Start",
"color": "#7EE787",
"hideInput": true,
"baseClasses": ["Start"],
"category": "Agent Flows",
"description": "Starting point of the agentflow",
"inputParams": [
{
"label": "Input Type",
"name": "startInputType",
"type": "options",
"options": [
{
"label": "Chat Input",
"name": "chatInput",
"description": "Start the conversation with chat input"
},
{
"label": "Form Input",
"name": "formInput",
"description": "Start the workflow with form inputs"
}
],
"default": "chatInput",
"id": "startAgentflow_0-input-startInputType-options",
"display": true
},
{
"label": "Form Title",
"name": "formTitle",
"type": "string",
"placeholder": "Please Fill Out The Form",
"show": {
"startInputType": "formInput"
},
"id": "startAgentflow_0-input-formTitle-string",
"display": false
},
{
"label": "Form Description",
"name": "formDescription",
"type": "string",
"placeholder": "Complete all fields below to continue",
"show": {
"startInputType": "formInput"
},
"id": "startAgentflow_0-input-formDescription-string",
"display": false
},
{
"label": "Form Input Types",
"name": "formInputTypes",
"description": "Specify the type of form input",
"type": "array",
"show": {
"startInputType": "formInput"
},
"array": [
{
"label": "Type",
"name": "type",
"type": "options",
"options": [
{
"label": "String",
"name": "string"
},
{
"label": "Number",
"name": "number"
},
{
"label": "Boolean",
"name": "boolean"
},
{
"label": "Options",
"name": "options"
}
],
"default": "string"
},
{
"label": "Label",
"name": "label",
"type": "string",
"placeholder": "Label for the input"
},
{
"label": "Variable Name",
"name": "name",
"type": "string",
"placeholder": "Variable name for the input (must be camel case)",
"description": "Variable name must be camel case. For example: firstName, lastName, etc."
},
{
"label": "Add Options",
"name": "addOptions",
"type": "array",
"show": {
"formInputTypes[$index].type": "options"
},
"array": [
{
"label": "Option",
"name": "option",
"type": "string"
}
]
}
],
"id": "startAgentflow_0-input-formInputTypes-array",
"display": false
},
{
"label": "Ephemeral Memory",
"name": "startEphemeralMemory",
"type": "boolean",
"description": "Start fresh for every execution without past chat history",
"optional": true
},
{
"label": "Flow State",
"name": "startState",
"description": "Runtime state during the execution of the workflow",
"type": "array",
"optional": true,
"array": [
{
"label": "Key",
"name": "key",
"type": "string",
"placeholder": "Foo"
},
{
"label": "Value",
"name": "value",
"type": "string",
"placeholder": "Bar"
}
],
"id": "startAgentflow_0-input-startState-array",
"display": true
}
],
"inputAnchors": [],
"inputs": {
"startInputType": "chatInput",
"formTitle": "",
"formDescription": "",
"formInputTypes": "",
"startState": ""
},
"outputAnchors": [
{
"id": "startAgentflow_0-output-startAgentflow",
"label": "Start",
"name": "startAgentflow"
}
],
"outputs": {},
"selected": false
},
"width": 101,
"height": 65,
"selected": false,
"positionAbsolute": {
"x": -192.5,
"y": 68
},
"dragging": false
},
{
"id": "llmAgentflow_0",
"position": {
"x": -31.25,
"y": 64.5
},
"data": {
"id": "llmAgentflow_0",
"label": "General Agent",
"version": 1,
"name": "llmAgentflow",
"type": "LLM",
"color": "#64B5F6",
"baseClasses": ["LLM"],
"category": "Agent Flows",
"description": "Large language models to analyze user-provided inputs and generate responses",
"inputParams": [
{
"label": "Model",
"name": "llmModel",
"type": "asyncOptions",
"loadMethod": "listModels",
"loadConfig": true,
"id": "llmAgentflow_0-input-llmModel-asyncOptions",
"display": true
},
{
"label": "Messages",
"name": "llmMessages",
"type": "array",
"optional": true,
"acceptVariable": true,
"array": [
{
"label": "Role",
"name": "role",
"type": "options",
"options": [
{
"label": "System",
"name": "system"
},
{
"label": "Assistant",
"name": "assistant"
},
{
"label": "Developer",
"name": "developer"
},
{
"label": "User",
"name": "user"
}
]
},
{
"label": "Content",
"name": "content",
"type": "string",
"acceptVariable": true,
"generateInstruction": true,
"rows": 4
}
],
"id": "llmAgentflow_0-input-llmMessages-array",
"display": true
},
{
"label": "Enable Memory",
"name": "llmEnableMemory",
"type": "boolean",
"description": "Enable memory for the conversation thread",
"default": true,
"optional": true,
"id": "llmAgentflow_0-input-llmEnableMemory-boolean",
"display": true
},
{
"label": "Memory Type",
"name": "llmMemoryType",
"type": "options",
"options": [
{
"label": "All Messages",
"name": "allMessages",
"description": "Retrieve all messages from the conversation"
},
{
"label": "Window Size",
"name": "windowSize",
"description": "Uses a fixed window size to surface the last N messages"
},
{
"label": "Conversation Summary",
"name": "conversationSummary",
"description": "Summarizes the whole conversation"
},
{
"label": "Conversation Summary Buffer",
"name": "conversationSummaryBuffer",
"description": "Summarize conversations once token limit is reached. Default to 2000"
}
],
"optional": true,
"default": "allMessages",
"show": {
"llmEnableMemory": true
},
"id": "llmAgentflow_0-input-llmMemoryType-options",
"display": true
},
{
"label": "Window Size",
"name": "llmMemoryWindowSize",
"type": "number",
"default": "20",
"description": "Uses a fixed window size to surface the last N messages",
"show": {
"llmMemoryType": "windowSize"
},
"id": "llmAgentflow_0-input-llmMemoryWindowSize-number",
"display": false
},
{
"label": "Max Token Limit",
"name": "llmMemoryMaxTokenLimit",
"type": "number",
"default": "2000",
"description": "Summarize conversations once token limit is reached. Default to 2000",
"show": {
"llmMemoryType": "conversationSummaryBuffer"
},
"id": "llmAgentflow_0-input-llmMemoryMaxTokenLimit-number",
"display": false
},
{
"label": "Input Message",
"name": "llmUserMessage",
"type": "string",
"description": "Add an input message as user message at the end of the conversation",
"rows": 4,
"optional": true,
"acceptVariable": true,
"show": {
"llmEnableMemory": true
},
"id": "llmAgentflow_0-input-llmUserMessage-string",
"display": true
},
{
"label": "Return Response As",
"name": "llmReturnResponseAs",
"type": "options",
"options": [
{
"label": "User Message",
"name": "userMessage"
},
{
"label": "Assistant Message",
"name": "assistantMessage"
}
],
"default": "userMessage",
"id": "llmAgentflow_0-input-llmReturnResponseAs-options",
"display": true
},
{
"label": "JSON Structured Output",
"name": "llmStructuredOutput",
"description": "Instruct the LLM to give output in a JSON structured schema",
"type": "array",
"optional": true,
"acceptVariable": true,
"array": [
{
"label": "Key",
"name": "key",
"type": "string"
},
{
"label": "Type",
"name": "type",
"type": "options",
"options": [
{
"label": "String",
"name": "string"
},
{
"label": "String Array",
"name": "stringArray"
},
{
"label": "Number",
"name": "number"
},
{
"label": "Boolean",
"name": "boolean"
},
{
"label": "Enum",
"name": "enum"
},
{
"label": "JSON Array",
"name": "jsonArray"
}
]
},
{
"label": "Enum Values",
"name": "enumValues",
"type": "string",
"placeholder": "value1, value2, value3",
"description": "Enum values. Separated by comma",
"optional": true,
"show": {
"llmStructuredOutput[$index].type": "enum"
}
},
{
"label": "JSON Schema",
"name": "jsonSchema",
"type": "code",
"placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}",
"description": "JSON schema for the structured output",
"optional": true,
"show": {
"llmStructuredOutput[$index].type": "jsonArray"
}
},
{
"label": "Description",
"name": "description",
"type": "string",
"placeholder": "Description of the key"
}
],
"id": "llmAgentflow_0-input-llmStructuredOutput-array",
"display": true
},
{
"label": "Update Flow State",
"name": "llmUpdateState",
"description": "Update runtime state during the execution of the workflow",
"type": "array",
"optional": true,
"acceptVariable": true,
"array": [
{
"label": "Key",
"name": "key",
"type": "asyncOptions",
"loadMethod": "listRuntimeStateKeys",
"freeSolo": true
},
{
"label": "Value",
"name": "value",
"type": "string",
"acceptVariable": true,
"acceptNodeOutputAsVariable": true
}
],
"id": "llmAgentflow_0-input-llmUpdateState-array",
"display": true
}
],
"inputAnchors": [],
"inputs": {
"llmModel": "chatOpenAI",
"llmMessages": "",
"llmEnableMemory": true,
"llmMemoryType": "allMessages",
"llmUserMessage": "",
"llmReturnResponseAs": "userMessage",
"llmStructuredOutput": "",
"llmUpdateState": "",
"llmModelConfig": {
"credential": "",
"modelName": "gpt-4o-mini",
"temperature": 0.9,
"streaming": true,
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
"presencePenalty": "",
"timeout": "",
"strictToolCalling": "",
"stopSequence": "",
"basepath": "",
"proxyUrl": "",
"baseOptions": "",
"allowImageUploads": "",
"imageResolution": "low",
"reasoningEffort": "medium",
"llmModel": "chatOpenAI"
}
},
"outputAnchors": [
{
"id": "llmAgentflow_0-output-llmAgentflow",
"label": "LLM",
"name": "llmAgentflow"
}
],
"outputs": {},
"selected": false
},
"type": "agentFlow",
"width": 168,
"height": 71,
"selected": false,
"positionAbsolute": {
"x": -31.25,
"y": 64.5
},
"dragging": false
},
{
"id": "toolAgentflow_0",
"position": {
"x": 182.75,
"y": 64.5
},
"data": {
"id": "toolAgentflow_0",
"label": "Slack Reply",
"version": 1,
"name": "toolAgentflow",
"type": "Tool",
"color": "#d4a373",
"baseClasses": ["Tool"],
"category": "Agent Flows",
"description": "Tools allow LLM to interact with external systems",
"inputParams": [
{
"label": "Tool",
"name": "selectedTool",
"type": "asyncOptions",
"loadMethod": "listTools",
"loadConfig": true,
"id": "toolAgentflow_0-input-selectedTool-asyncOptions",
"display": true
},
{
"label": "Tool Input Arguments",
"name": "toolInputArgs",
"type": "array",
"acceptVariable": true,
"refresh": true,
"array": [
{
"label": "Input Argument Name",
"name": "inputArgName",
"type": "asyncOptions",
"loadMethod": "listToolInputArgs",
"refresh": true
},
{
"label": "Input Argument Value",
"name": "inputArgValue",
"type": "string",
"acceptVariable": true
}
],
"show": {
"selectedTool": ".+"
},
"id": "toolAgentflow_0-input-toolInputArgs-array",
"display": true
},
{
"label": "Update Flow State",
"name": "toolUpdateState",
"description": "Update runtime state during the execution of the workflow",
"type": "array",
"optional": true,
"acceptVariable": true,
"array": [
{
"label": "Key",
"name": "key",
"type": "asyncOptions",
"loadMethod": "listRuntimeStateKeys",
"freeSolo": true
},
{
"label": "Value",
"name": "value",
"type": "string",
"acceptVariable": true,
"acceptNodeOutputAsVariable": true
}
],
"id": "toolAgentflow_0-input-toolUpdateState-array",
"display": true
}
],
"inputAnchors": [],
"inputs": {
"selectedTool": "slackMCP",
"toolInputArgs": [
{
"inputArgName": "channel_id",
"inputArgValue": "<p>ABCDEFG</p>"
},
{
"inputArgName": "text",
"inputArgValue": "<p><span class=\"variable\" data-type=\"mention\" data-id=\"llmAgentflow_0\" data-label=\"llmAgentflow_0\">{{ llmAgentflow_0 }}</span> </p>"
}
],
"toolUpdateState": "",
"selectedToolConfig": {
"mcpActions": "[\"slack_post_message\"]",
"selectedTool": "slackMCP"
}
},
"outputAnchors": [
{
"id": "toolAgentflow_0-output-toolAgentflow",
"label": "Tool",
"name": "toolAgentflow"
}
],
"outputs": {},
"selected": false
},
"type": "agentFlow",
"width": 142,
"height": 71,
"selected": false,
"positionAbsolute": {
"x": 182.75,
"y": 64.5
},
"dragging": false
},
{
"id": "directReplyAgentflow_0",
"position": {
"x": 366.75,
"y": 67.5
},
"data": {
"id": "directReplyAgentflow_0",
"label": "Direct Reply To Chat",
"version": 1,
"name": "directReplyAgentflow",
"type": "DirectReply",
"color": "#4DDBBB",
"hideOutput": true,
"baseClasses": ["DirectReply"],
"category": "Agent Flows",
"description": "Directly reply to the user with a message",
"inputParams": [
{
"label": "Message",
"name": "directReplyMessage",
"type": "string",
"rows": 4,
"acceptVariable": true,
"id": "directReplyAgentflow_0-input-directReplyMessage-string",
"display": true
}
],
"inputAnchors": [],
"inputs": {
"directReplyMessage": "<p><span class=\"variable\" data-type=\"mention\" data-id=\"llmAgentflow_0\" data-label=\"llmAgentflow_0\">{{ llmAgentflow_0 }}</span> </p>"
},
"outputAnchors": [],
"outputs": {},
"selected": false
},
"type": "agentFlow",
"width": 194,
"height": 65,
"selected": false,
"positionAbsolute": {
"x": 366.75,
"y": 67.5
},
"dragging": false
}
],
"edges": [
{
"source": "startAgentflow_0",
"sourceHandle": "startAgentflow_0-output-startAgentflow",
"target": "llmAgentflow_0",
"targetHandle": "llmAgentflow_0",
"data": {
"sourceColor": "#7EE787",
"targetColor": "#64B5F6",
"isHumanInput": false
},
"type": "agentFlow",
"id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-llmAgentflow_0-llmAgentflow_0"
},
{
"source": "llmAgentflow_0",
"sourceHandle": "llmAgentflow_0-output-llmAgentflow",
"target": "toolAgentflow_0",
"targetHandle": "toolAgentflow_0",
"data": {
"sourceColor": "#64B5F6",
"targetColor": "#d4a373",
"isHumanInput": false
},
"type": "agentFlow",
"id": "llmAgentflow_0-llmAgentflow_0-output-llmAgentflow-toolAgentflow_0-toolAgentflow_0"
},
{
"source": "toolAgentflow_0",
"sourceHandle": "toolAgentflow_0-output-toolAgentflow",
"target": "directReplyAgentflow_0",
"targetHandle": "directReplyAgentflow_0",
"data": {
"sourceColor": "#d4a373",
"targetColor": "#4DDBBB",
"isHumanInput": false
},
"type": "agentFlow",
"id": "toolAgentflow_0-toolAgentflow_0-output-toolAgentflow-directReplyAgentflow_0-directReplyAgentflow_0"
}
]
}
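
The template above chains four nodes: Start → General Agent (LLM) → Slack Reply (the slackMCP tool invoking slack_post_message with a hard-coded channel_id and the LLM output as text) → Direct Reply To Chat. Once imported into a Flowise instance, the flow can be triggered over HTTP through Flowise's prediction endpoint. The sketch below is a minimal illustration only: the base URL http://localhost:3000 and the <agentflow-id> placeholder are assumptions you would replace with your own instance and the ID assigned after import.

```typescript
// Minimal sketch, assuming a local Flowise instance at http://localhost:3000
// and a placeholder agentflow ID; both are assumptions, not part of the template.
// Sends a chat message to the imported flow via the prediction REST endpoint;
// the flow drafts a reply with the General Agent node, posts it to Slack via
// slack_post_message, and returns the same text through the Direct Reply node.
async function askSlackAgent(question: string): Promise<string> {
  const response = await fetch(
    "http://localhost:3000/api/v1/prediction/<agentflow-id>",
    {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ question }),
    }
  );
  if (!response.ok) {
    throw new Error(`Prediction request failed: ${response.status}`);
  }
  const result = await response.json();
  // The chat response carries the LLM output surfaced by the Direct Reply node.
  return result.text;
}

askSlackAgent("Summarize today's release notes for the team").then(console.log);
```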