Merge pull request #27 from FlowiseAI/feature/AutoGPT

Feature/Add AutoGPT, Readfile and Writefile tools
This commit is contained in:
Henry Heng 2023-04-22 15:59:25 +01:00 committed by GitHub
commit e008718e16
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 725 additions and 11 deletions

View File

@ -0,0 +1,97 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { BaseChatModel } from 'langchain/chat_models'
import { AutoGPT } from 'langchain/experimental/autogpt'
import { Tool } from 'langchain/tools'
import { VectorStoreRetriever } from 'langchain/vectorstores/base'
/**
 * Flowise node wrapping LangChain's experimental AutoGPT agent.
 * Wires up a chat model, tools, and a vector-store retriever (used as
 * AutoGPT's memory), then exposes `run` to execute a single goal.
 */
class AutoGPT_Agents implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'AutoGPT'
        this.name = 'autoGPT'
        this.type = 'AutoGPT'
        this.category = 'Agents'
        this.icon = 'autogpt.png'
        this.description = 'Autonomous agent with chain of thoughts for self-guided task completion'
        this.baseClasses = ['AutoGPT']
        this.inputs = [
            {
                label: 'Allowed Tools',
                name: 'tools',
                type: 'Tool',
                list: true
            },
            {
                label: 'Chat Model',
                name: 'model',
                type: 'BaseChatModel'
            },
            {
                label: 'Vector Store Retriever',
                name: 'vectorStoreRetriever',
                type: 'BaseRetriever'
            },
            {
                label: 'AutoGPT Name',
                name: 'aiName',
                type: 'string',
                placeholder: 'Tom',
                optional: true
            },
            {
                label: 'AutoGPT Role',
                name: 'aiRole',
                type: 'string',
                placeholder: 'Assistant',
                optional: true
            },
            {
                label: 'Maximum Loop',
                name: 'maxLoop',
                type: 'number',
                default: 5,
                optional: true
            }
        ]
    }

    /**
     * Builds the AutoGPT instance from the connected model, tools and retriever.
     * @param nodeData - resolved node inputs from the flow
     * @returns the configured AutoGPT agent
     */
    async init(nodeData: INodeData): Promise<any> {
        const model = nodeData.inputs?.model as BaseChatModel
        const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as VectorStoreRetriever
        const tools = nodeData.inputs?.tools as Tool[]
        const aiName = (nodeData.inputs?.aiName as string) || 'AutoGPT'
        const aiRole = (nodeData.inputs?.aiRole as string) || 'Assistant'
        const maxLoop = nodeData.inputs?.maxLoop as string

        const autogpt = AutoGPT.fromLLMAndTools(model, tools, {
            memory: vectorStoreRetriever,
            aiName,
            aiRole
        })
        // maxLoop is optional: parseInt(undefined) is NaN, which would silently
        // remove the iteration cap. Fall back to the declared default of 5.
        const parsedMaxLoop = parseInt(maxLoop, 10)
        autogpt.maxIterations = Number.isNaN(parsedMaxLoop) ? 5 : parsedMaxLoop
        return autogpt
    }

    /**
     * Runs the agent against a single goal string.
     * @param nodeData - carries the AutoGPT instance created by init()
     * @param input - the user's goal
     * @returns the agent's final answer, or a completion message when empty
     * @throws rethrows any failure from the underlying agent
     */
    async run(nodeData: INodeData, input: string): Promise<string> {
        const executor = nodeData.instance as AutoGPT
        try {
            const res = await executor.run([input])
            return res || 'I have completed all my tasks.'
        } catch (e) {
            console.error(e)
            // Rethrow the original Error to preserve its message and stack;
            // `new Error(e)` would stringify non-Errors to "[object Object]".
            if (e instanceof Error) throw e
            throw new Error(String(e))
        }
    }
}
module.exports = { nodeClass: AutoGPT_Agents }

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

View File

@ -0,0 +1,42 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ReadFileTool } from 'langchain/tools'
import { NodeFileStore } from 'langchain/stores/file/node'
class ReadFile_Tools implements INode {
label: string
name: string
description: string
type: string
icon: string
category: string
baseClasses: string[]
inputs: INodeParams[]
constructor() {
this.label = 'Read File'
this.name = 'readFile'
this.type = 'ReadFile'
this.icon = 'readfile.svg'
this.category = 'Tools'
this.description = 'Read file from disk'
this.baseClasses = [this.type, 'Tool', ...getBaseClasses(ReadFileTool)]
this.inputs = [
{
label: 'Base Path',
name: 'basePath',
placeholder: `C:\\Users\\User\\Desktop`,
type: 'string',
optional: true
}
]
}
async init(nodeData: INodeData): Promise<any> {
const basePath = nodeData.inputs?.basePath as string
const store = basePath ? new NodeFileStore(basePath) : new NodeFileStore()
return new ReadFileTool({ store })
}
}
module.exports = { nodeClass: ReadFile_Tools }

View File

@ -0,0 +1,6 @@
<!-- Tabler "file-check" icon used as the Read File tool's node icon (readfile.svg) -->
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-file-check" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M14 3v4a1 1 0 0 0 1 1h4"></path>
<path d="M17 21h-10a2 2 0 0 1 -2 -2v-14a2 2 0 0 1 2 -2h7l5 5v11a2 2 0 0 1 -2 2z"></path>
<path d="M9 15l2 2l4 -4"></path>
</svg>

After

Width:  |  Height:  |  Size: 470 B

View File

@ -0,0 +1,42 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { WriteFileTool } from 'langchain/tools'
import { NodeFileStore } from 'langchain/stores/file/node'
/**
 * Flowise node exposing LangChain's WriteFileTool, which lets an agent
 * write files to disk under an optional base path.
 */
class WriteFile_Tools implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Write File'
        this.name = 'writeFile'
        this.type = 'WriteFile'
        this.icon = 'writefile.svg'
        this.category = 'Tools'
        this.description = 'Write file to disk'
        this.baseClasses = [this.type, 'Tool', ...getBaseClasses(WriteFileTool)]
        this.inputs = [
            {
                // Directory the tool is allowed to write into; optional.
                label: 'Base Path',
                name: 'basePath',
                placeholder: `C:\\Users\\User\\Desktop`,
                type: 'string',
                optional: true
            }
        ]
    }

    /**
     * Creates the WriteFileTool backed by a NodeFileStore, rooted at the
     * configured base path when one was provided.
     */
    async init(nodeData: INodeData): Promise<any> {
        const rootPath = nodeData.inputs?.basePath as string
        const store = rootPath ? new NodeFileStore(rootPath) : new NodeFileStore()
        return new WriteFileTool({ store })
    }
}
module.exports = { nodeClass: WriteFile_Tools }

View File

@ -0,0 +1,6 @@
<!-- Tabler "file-pencil" icon used as the Write File tool's node icon (writefile.svg) -->
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-file-pencil" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M14 3v4a1 1 0 0 0 1 1h4"></path>
<path d="M17 21h-10a2 2 0 0 1 -2 -2v-14a2 2 0 0 1 2 -2h7l5 5v11a2 2 0 0 1 -2 2z"></path>
<path d="M10 18l5 -5a1.414 1.414 0 0 0 -2 -2l-5 5v2h2z"></path>
</svg>

After

Width:  |  Height:  |  Size: 502 B

View File

@ -21,7 +21,7 @@ class Chroma_Existing_VectorStores implements INode {
this.icon = 'chroma.svg'
this.category = 'Vector Stores'
this.description = 'Load existing index from Chroma (i.e: Document has been upserted)'
this.baseClasses = [this.type, 'BaseRetriever']
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
this.inputs = [
{
label: 'Embeddings',
@ -38,7 +38,7 @@ class Chroma_Existing_VectorStores implements INode {
{
label: 'Chroma Retriever',
name: 'retriever',
baseClasses: [this.type, 'BaseRetriever']
baseClasses: this.baseClasses
},
{
label: 'Chroma Vector Store',

View File

@ -22,7 +22,7 @@ class ChromaUpsert_VectorStores implements INode {
this.icon = 'chroma.svg'
this.category = 'Vector Stores'
this.description = 'Upsert documents to Chroma'
this.baseClasses = [this.type, 'BaseRetriever']
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
this.inputs = [
{
label: 'Document',
@ -44,7 +44,7 @@ class ChromaUpsert_VectorStores implements INode {
{
label: 'Chroma Retriever',
name: 'retriever',
baseClasses: [this.type, 'BaseRetriever']
baseClasses: this.baseClasses
},
{
label: 'Chroma Vector Store',

View File

@ -22,7 +22,7 @@ class Pinecone_Existing_VectorStores implements INode {
this.icon = 'pinecone.png'
this.category = 'Vector Stores'
this.description = 'Load existing index from Pinecone (i.e: Document has been upserted)'
this.baseClasses = [this.type, 'BaseRetriever']
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
this.inputs = [
{
label: 'Embeddings',
@ -49,7 +49,7 @@ class Pinecone_Existing_VectorStores implements INode {
{
label: 'Pinecone Retriever',
name: 'retriever',
baseClasses: [this.type, 'BaseRetriever']
baseClasses: this.baseClasses
},
{
label: 'Pinecone Vector Store',

View File

@ -23,7 +23,7 @@ class PineconeUpsert_VectorStores implements INode {
this.icon = 'pinecone.png'
this.category = 'Vector Stores'
this.description = 'Upsert documents to Pinecone'
this.baseClasses = [this.type, 'BaseRetriever']
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
this.inputs = [
{
label: 'Document',
@ -55,7 +55,7 @@ class PineconeUpsert_VectorStores implements INode {
{
label: 'Pinecone Retriever',
name: 'retriever',
baseClasses: [this.type, 'BaseRetriever']
baseClasses: this.baseClasses
},
{
label: 'Pinecone Vector Store',

View File

@ -27,7 +27,7 @@
"dotenv": "^16.0.0",
"express": "^4.17.3",
"form-data": "^4.0.0",
"langchain": "^0.0.59",
"langchain": "^0.0.60",
"moment": "^2.29.3",
"node-fetch": "2",
"pdf-parse": "^1.1.1",

View File

@ -0,0 +1,521 @@
{
"description": "Use AutoGPT - Autonomous agent with chain of thoughts for self-guided task completion",
"nodes": [
{
"width": 300,
"height": 552,
"id": "pineconeExistingIndex_0",
"position": {
"x": 901.718945246004,
"y": 444.1940851368503
},
"type": "customNode",
"data": {
"id": "pineconeExistingIndex_0",
"label": "Pinecone Load Existing Index",
"name": "pineconeExistingIndex",
"type": "Pinecone",
"baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"],
"category": "Vector Stores",
"description": "Load existing index from Pinecone (i.e: Document has been upserted)",
"inputParams": [
{
"label": "Pinecone Api Key",
"name": "pineconeApiKey",
"type": "password",
"id": "pineconeExistingIndex_0-input-pineconeApiKey-password"
},
{
"label": "Pinecone Environment",
"name": "pineconeEnv",
"type": "string",
"id": "pineconeExistingIndex_0-input-pineconeEnv-string"
},
{
"label": "Pinecone Index",
"name": "pineconeIndex",
"type": "string",
"id": "pineconeExistingIndex_0-input-pineconeIndex-string"
}
],
"inputAnchors": [
{
"label": "Embeddings",
"name": "embeddings",
"type": "Embeddings",
"id": "pineconeExistingIndex_0-input-embeddings-Embeddings"
}
],
"inputs": {
"embeddings": "{{openAIEmbeddings_0.data.instance}}",
"pineconeEnv": "us-west4-gcp",
"pineconeIndex": "test"
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever",
"name": "retriever",
"label": "Pinecone Retriever",
"type": "Pinecone | VectorStoreRetriever | BaseRetriever"
},
{
"id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore",
"name": "vectorStore",
"label": "Pinecone Vector Store",
"type": "Pinecone | VectorStore"
}
],
"default": "retriever"
}
],
"outputs": {
"output": "retriever"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 901.718945246004,
"y": 444.1940851368503
},
"dragging": false
},
{
"width": 300,
"height": 278,
"id": "serpAPI_0",
"position": {
"x": 1227.523512217788,
"y": -238.42913167235224
},
"type": "customNode",
"data": {
"id": "serpAPI_0",
"label": "Serp API",
"name": "serpAPI",
"type": "SerpAPI",
"baseClasses": ["SerpAPI", "Tool", "StructuredTool", "BaseLangChain"],
"category": "Tools",
"description": "Wrapper around SerpAPI - a real-time API to access Google search results",
"inputParams": [
{
"label": "Serp Api Key",
"name": "apiKey",
"type": "password",
"id": "serpAPI_0-input-apiKey-password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain",
"name": "serpAPI",
"label": "SerpAPI",
"type": "SerpAPI | Tool | StructuredTool | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1227.523512217788,
"y": -238.42913167235224
},
"dragging": false
},
{
"width": 300,
"height": 472,
"id": "chatOpenAI_0",
"position": {
"x": 209.29564045622084,
"y": -57.95595709847885
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password",
"id": "chatOpenAI_0-input-openAIApiKey-password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0314",
"name": "gpt-4-0314"
},
{
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
}
],
"default": "gpt-3.5-turbo",
"optional": true,
"id": "chatOpenAI_0-input-modelName-options"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true,
"id": "chatOpenAI_0-input-temperature-number"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": "0"
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 209.29564045622084,
"y": -57.95595709847885
},
"dragging": false
},
{
"width": 300,
"height": 278,
"id": "openAIEmbeddings_0",
"position": {
"x": 475.13407328701226,
"y": 487.8126876963096
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["OpenAIEmbeddings", "Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password",
"id": "openAIEmbeddings_0-input-openAIApiKey-password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "OpenAIEmbeddings | Embeddings"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 475.13407328701226,
"y": 487.8126876963096
},
"dragging": false
},
{
"width": 300,
"height": 278,
"id": "writeFile_0",
"position": {
"x": 550.6084292669964,
"y": 19.154090284731183
},
"type": "customNode",
"data": {
"id": "writeFile_0",
"label": "Write File",
"name": "writeFile",
"type": "WriteFile",
"baseClasses": ["WriteFile", "Tool", "StructuredTool", "BaseLangChain"],
"category": "Tools",
"description": "Write file to disk",
"inputParams": [
{
"label": "Base Path",
"name": "basePath",
"placeholder": "C:\\Users\\Henry\\Desktop",
"type": "string",
"optional": true,
"id": "writeFile_0-input-basePath-string"
}
],
"inputAnchors": [],
"inputs": {
"basePath": "C:\\Users\\Henry\\Desktop"
},
"outputAnchors": [
{
"id": "writeFile_0-output-writeFile-WriteFile|Tool|StructuredTool|BaseLangChain",
"name": "writeFile",
"label": "WriteFile",
"type": "WriteFile | Tool | StructuredTool | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 550.6084292669964,
"y": 19.154090284731183
},
"dragging": false
},
{
"width": 300,
"height": 278,
"id": "readFile_0",
"position": {
"x": 885.888842186094,
"y": -120.40803047194753
},
"type": "customNode",
"data": {
"id": "readFile_0",
"label": "Read File",
"name": "readFile",
"type": "ReadFile",
"baseClasses": ["ReadFile", "Tool", "StructuredTool", "BaseLangChain"],
"category": "Tools",
"description": "Read file from disk",
"inputParams": [
{
"label": "Base Path",
"name": "basePath",
"placeholder": "C:\\Users\\Henry\\Desktop",
"type": "string",
"optional": true,
"id": "readFile_0-input-basePath-string"
}
],
"inputAnchors": [],
"inputs": {
"basePath": "C:\\Users\\Henry\\Desktop"
},
"outputAnchors": [
{
"id": "readFile_0-output-readFile-ReadFile|Tool|StructuredTool|BaseLangChain",
"name": "readFile",
"label": "ReadFile",
"type": "ReadFile | Tool | StructuredTool | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 885.888842186094,
"y": -120.40803047194753
},
"dragging": false
},
{
"width": 300,
"height": 627,
"id": "autoGPT_0",
"position": {
"x": 1627.8124366169843,
"y": 129.76619452400155
},
"type": "customNode",
"data": {
"id": "autoGPT_0",
"label": "AutoGPT",
"name": "autoGPT",
"type": "AutoGPT",
"baseClasses": ["AutoGPT"],
"category": "Agents",
"description": "Autonomous agent with chain of thoughts by GPT4",
"inputParams": [
{
"label": "AutoGPT Name",
"name": "aiName",
"type": "string",
"placeholder": "Tom",
"optional": true,
"id": "autoGPT_0-input-aiName-string"
},
{
"label": "AutoGPT Role",
"name": "aiRole",
"type": "string",
"placeholder": "Assistant",
"optional": true,
"id": "autoGPT_0-input-aiRole-string"
},
{
"label": "Maximum Loop",
"name": "maxLoop",
"type": "number",
"default": 5,
"optional": true,
"id": "autoGPT_0-input-maxLoop-number"
}
],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "autoGPT_0-input-tools-Tool"
},
{
"label": "Chat Model",
"name": "model",
"type": "BaseChatModel",
"id": "autoGPT_0-input-model-BaseChatModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "autoGPT_0-input-vectorStoreRetriever-BaseRetriever"
}
],
"inputs": {
"tools": ["{{readFile_0.data.instance}}", "{{writeFile_0.data.instance}}", "{{serpAPI_0.data.instance}}"],
"model": "{{chatOpenAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeExistingIndex_0.data.instance}}",
"aiName": "",
"aiRole": "",
"maxLoop": 5
},
"outputAnchors": [
{
"id": "autoGPT_0-output-autoGPT-AutoGPT",
"name": "autoGPT",
"label": "AutoGPT",
"type": "AutoGPT"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1627.8124366169843,
"y": 129.76619452400155
},
"dragging": false
}
],
"edges": [
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"target": "pineconeExistingIndex_0",
"targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
},
{
"source": "serpAPI_0",
"sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain",
"target": "autoGPT_0",
"targetHandle": "autoGPT_0-input-tools-Tool",
"type": "buttonedge",
"id": "serpAPI_0-serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain-autoGPT_0-autoGPT_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "readFile_0",
"sourceHandle": "readFile_0-output-readFile-ReadFile|Tool|StructuredTool|BaseLangChain",
"target": "autoGPT_0",
"targetHandle": "autoGPT_0-input-tools-Tool",
"type": "buttonedge",
"id": "readFile_0-readFile_0-output-readFile-ReadFile|Tool|StructuredTool|BaseLangChain-autoGPT_0-autoGPT_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "writeFile_0",
"sourceHandle": "writeFile_0-output-writeFile-WriteFile|Tool|StructuredTool|BaseLangChain",
"target": "autoGPT_0",
"targetHandle": "autoGPT_0-input-tools-Tool",
"type": "buttonedge",
"id": "writeFile_0-writeFile_0-output-writeFile-WriteFile|Tool|StructuredTool|BaseLangChain-autoGPT_0-autoGPT_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
"target": "autoGPT_0",
"targetHandle": "autoGPT_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-autoGPT_0-autoGPT_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "pineconeExistingIndex_0",
"sourceHandle": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever",
"target": "autoGPT_0",
"targetHandle": "autoGPT_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-autoGPT_0-autoGPT_0-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}
}
]
}

View File

@ -292,9 +292,9 @@ export const rearrangeToolsOrdering = (newValues, sourceNodeId) => {
newValues.push(`{{${sourceNodeId}.data.instance}}`)
const sortKey = (item) => {
if (item.includes('requestsGet')) {
if (item.includes('requestsGet') || item.includes('readFile')) {
return 0
} else if (item.includes('requestsPost')) {
} else if (item.includes('requestsPost') || item.includes('writeFile')) {
return 1
} else {
return 2