Merge branch 'main' into feature/Prompt-Chaining

This commit is contained in:
Henry 2023-04-19 22:28:44 +01:00
commit 94e71b22e2
9 changed files with 630 additions and 4 deletions

View File

@ -1,6 +1,5 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutor, AgentExecutor } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { initializeAgentExecutor, AgentExecutor, Tool } from 'langchain/agents'
import { BaseChatModel } from 'langchain/chat_models/base'
import { getBaseClasses } from '../../../src/utils'

View File

@ -0,0 +1,81 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { SqlDatabaseChain } from 'langchain/chains'
import { getBaseClasses } from '../../../src/utils'
import { DataSource } from 'typeorm'
import { SqlDatabase } from 'langchain/sql_db'
import { BaseLLM } from 'langchain/llms/base'
/**
 * Flowise node wrapping LangChain's SqlDatabaseChain: answers natural-language
 * questions by generating and executing SQL against a configured database.
 *
 * Inputs: an LLM instance, a database type (currently SQLite only) and the
 * path to the database file.
 */
class SqlDatabaseChain_Chains implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    description: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'Sql Database Chain'
        this.name = 'sqlDatabaseChain'
        this.type = 'SqlDatabaseChain'
        this.icon = 'sqlchain.svg'
        this.category = 'Chains'
        this.description = 'Answer questions over a SQL database'
        this.baseClasses = [this.type, ...getBaseClasses(SqlDatabaseChain)]
        this.inputs = [
            {
                label: 'LLM',
                name: 'llm',
                type: 'BaseLLM'
            },
            {
                label: 'Database',
                name: 'database',
                type: 'options',
                options: [
                    {
                        // Fixed label typo: was 'SQlite'
                        label: 'SQLite',
                        name: 'sqlite'
                    }
                ],
                default: 'sqlite'
            },
            {
                label: 'Database File Path',
                name: 'dbFilePath',
                type: 'string',
                placeholder: 'C:/Users/chinook.db'
            }
        ]
    }

    /**
     * Builds the SqlDatabaseChain instance from the node's resolved inputs.
     * @param nodeData node configuration holding llm, database type and file path
     * @returns a ready-to-run SqlDatabaseChain
     * @throws Error when no database file path was provided
     */
    async init(nodeData: INodeData): Promise<any> {
        const databaseType = nodeData.inputs?.database
        const llm = nodeData.inputs?.llm as BaseLLM
        const dbFilePath = nodeData.inputs?.dbFilePath

        // Fail fast with a clear message instead of letting TypeORM error
        // cryptically on an undefined/empty database path.
        if (!dbFilePath) {
            throw new Error('Database file path is required')
        }

        const datasource = new DataSource({
            type: databaseType,
            database: dbFilePath
        })
        const db = await SqlDatabase.fromDataSourceParams({
            appDataSource: datasource
        })
        const chain = new SqlDatabaseChain({
            llm,
            database: db
        })
        return chain
    }

    /**
     * Runs the previously initialized chain (stored on nodeData.instance)
     * against the user's question and returns the answer text.
     */
    async run(nodeData: INodeData, input: string): Promise<string> {
        const chain = nodeData.instance as SqlDatabaseChain
        const res = await chain.run(input)
        return res
    }
}

module.exports = { nodeClass: SqlDatabaseChain_Chains }

View File

@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-sql" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M12 8a2 2 0 0 1 2 2v4a2 2 0 1 1 -4 0v-4a2 2 0 0 1 2 -2z"></path>
<path d="M17 8v8h4"></path>
<path d="M13 15l1 1"></path>
<path d="M3 15a1 1 0 0 0 1 1h2a1 1 0 0 0 1 -1v-2a1 1 0 0 0 -1 -1h-2a1 1 0 0 1 -1 -1v-2a1 1 0 0 1 1 -1h2a1 1 0 0 1 1 1"></path>
</svg>

After

Width:  |  Height:  |  Size: 560 B

View File

@ -0,0 +1,41 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { AIPluginTool } from 'langchain/tools'
import { getBaseClasses } from '../../../src/utils'
/**
 * Tool node that loads a ChatGPT plugin from its manifest URL and exposes it
 * as a LangChain AIPluginTool for agents to call.
 */
class AIPlugin implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs?: INodeParams[]

    constructor() {
        this.label = 'AI Plugin'
        this.name = 'aiPlugin'
        this.type = 'AIPlugin'
        this.icon = 'aiplugin.svg'
        this.category = 'Tools'
        this.description = 'Execute actions using ChatGPT Plugin Url'
        this.baseClasses = [this.type, ...getBaseClasses(AIPluginTool)]

        // Single configuration field: where to fetch the plugin manifest from.
        const pluginUrlParam: INodeParams = {
            label: 'Plugin Url',
            name: 'pluginUrl',
            type: 'string',
            placeholder: 'https://www.klarna.com/.well-known/ai-plugin.json'
        }
        this.inputs = [pluginUrlParam]
    }

    /**
     * Fetches the plugin manifest at the configured URL and returns the
     * resulting AIPluginTool instance.
     */
    async init(nodeData: INodeData): Promise<any> {
        const url = nodeData.inputs?.pluginUrl as string
        return await AIPluginTool.fromPluginUrl(url)
    }
}

module.exports = { nodeClass: AIPlugin }

View File

@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-plug" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M9.785 6l8.215 8.215l-2.054 2.054a5.81 5.81 0 1 1 -8.215 -8.215l2.054 -2.054z"></path>
<path d="M4 20l3.5 -3.5"></path>
<path d="M15 4l-3.5 3.5"></path>
<path d="M20 9l-3.5 3.5"></path>
</svg>

After

Width:  |  Height:  |  Size: 498 B

View File

@ -0,0 +1,307 @@
{
"description": "Use ChatGPT Plugins within LangChain abstractions with GET and POST Tools",
"nodes": [
{
"width": 300,
"height": 278,
"id": "aiPlugin_0",
"position": {
"x": 992.9213747553727,
"y": 115.80946637479596
},
"type": "customNode",
"data": {
"id": "aiPlugin_0",
"label": "AI Plugin",
"name": "aiPlugin",
"type": "AIPlugin",
"baseClasses": ["AIPlugin", "Tool"],
"category": "Tools",
"description": "Execute actions using ChatGPT Plugin Url",
"inputParams": [
{
"label": "Plugin Url",
"name": "pluginUrl",
"type": "string",
"placeholder": "https://www.klarna.com/.well-known/ai-plugin.json"
}
],
"inputAnchors": [],
"inputs": {
"pluginUrl": "https://www.klarna.com/.well-known/ai-plugin.json"
},
"outputAnchors": [
{
"id": "aiPlugin_0-output-aiPlugin-AIPlugin|Tool",
"name": "aiPlugin",
"label": "AIPlugin",
"type": "AIPlugin | Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 992.9213747553727,
"y": 115.80946637479596
},
"dragging": false
},
{
"width": 300,
"height": 143,
"id": "requestsPost_0",
"position": {
"x": 638.2831241951309,
"y": 294.0784991300699
},
"type": "customNode",
"data": {
"id": "requestsPost_0",
"label": "Requests Post",
"name": "requestsPost",
"type": "RequestsPost",
"baseClasses": ["RequestsPost", "Tool"],
"category": "Tools",
"description": "Execute HTTP POST requests",
"inputParams": [],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "requestsPost_0-output-requestsPost-RequestsPost|Tool",
"name": "requestsPost",
"label": "RequestsPost",
"type": "RequestsPost | Tool"
}
],
"selected": false
},
"positionAbsolute": {
"x": 638.2831241951309,
"y": 294.0784991300699
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 143,
"id": "requestsGet_0",
"position": {
"x": 703.0477667387721,
"y": 476.8955204497346
},
"type": "customNode",
"data": {
"id": "requestsGet_0",
"label": "Requests Get",
"name": "requestsGet",
"type": "RequestsGet",
"baseClasses": ["RequestsGet", "Tool"],
"category": "Tools",
"description": "Execute HTTP GET requests",
"inputParams": [],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "requestsGet_0-output-requestsGet-RequestsGet|Tool",
"name": "requestsGet",
"label": "RequestsGet",
"type": "RequestsGet | Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 703.0477667387721,
"y": 476.8955204497346
},
"dragging": false
},
{
"width": 300,
"height": 280,
"id": "mrklAgentChat_0",
"position": {
"x": 1363.057715565282,
"y": 479.27393467974
},
"type": "customNode",
"data": {
"id": "mrklAgentChat_0",
"label": "MRKL Agent for Chat Models",
"name": "mrklAgentChat",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
"description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with Chat Models",
"inputParams": [],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "mrklAgentChat_0-input-tools-Tool"
},
{
"label": "Chat Model",
"name": "model",
"type": "BaseChatModel",
"id": "mrklAgentChat_0-input-model-BaseChatModel"
}
],
"inputs": {
"tools": ["{{requestsGet_0.data.instance}}", "{{requestsPost_0.data.instance}}", "{{aiPlugin_0.data.instance}}"],
"model": "{{chatOpenAI_0.data.instance}}"
},
"outputAnchors": [
{
"id": "mrklAgentChat_0-output-mrklAgentChat-AgentExecutor|BaseChain",
"name": "mrklAgentChat",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1363.057715565282,
"y": 479.27393467974
},
"dragging": false
},
{
"width": 300,
"height": 472,
"id": "chatOpenAI_0",
"position": {
"x": 724.4534948088211,
"y": 668.3578659651726
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0314",
"name": "gpt-4-0314"
},
{
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
}
],
"default": "gpt-3.5-turbo",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": "0"
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 724.4534948088211,
"y": 668.3578659651726
},
"dragging": false
}
],
"edges": [
{
"source": "aiPlugin_0",
"sourceHandle": "aiPlugin_0-output-aiPlugin-AIPlugin|Tool",
"target": "mrklAgentChat_0",
"targetHandle": "mrklAgentChat_0-input-tools-Tool",
"type": "buttonedge",
"id": "aiPlugin_0-aiPlugin_0-output-aiPlugin-AIPlugin|Tool-mrklAgentChat_0-mrklAgentChat_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "requestsGet_0",
"sourceHandle": "requestsGet_0-output-requestsGet-RequestsGet|Tool",
"target": "mrklAgentChat_0",
"targetHandle": "mrklAgentChat_0-input-tools-Tool",
"type": "buttonedge",
"id": "requestsGet_0-requestsGet_0-output-requestsGet-RequestsGet|Tool-mrklAgentChat_0-mrklAgentChat_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "requestsPost_0",
"sourceHandle": "requestsPost_0-output-requestsPost-RequestsPost|Tool",
"target": "mrklAgentChat_0",
"targetHandle": "mrklAgentChat_0-input-tools-Tool",
"type": "buttonedge",
"id": "requestsPost_0-requestsPost_0-output-requestsPost-RequestsPost|Tool-mrklAgentChat_0-mrklAgentChat_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "mrklAgentChat_0",
"targetHandle": "mrklAgentChat_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-mrklAgentChat_0-mrklAgentChat_0-input-model-BaseChatModel",
"data": {
"label": ""
}
}
]
}

View File

@ -0,0 +1,163 @@
{
"description": "Answer questions over a SQL database",
"nodes": [
{
"width": 300,
"height": 424,
"id": "sqlDatabaseChain_0",
"position": {
"x": 1271.2742585099204,
"y": 232.91561199714107
},
"type": "customNode",
"data": {
"id": "sqlDatabaseChain_0",
"label": "Sql Database Chain",
"name": "sqlDatabaseChain",
"type": "SqlDatabaseChain",
"baseClasses": ["SqlDatabaseChain", "BaseChain"],
"category": "Chains",
"description": "Answer questions over a SQL database",
"inputParams": [
{
"label": "Database",
"name": "database",
"type": "options",
"options": [
{
                            "label": "SQLite",
"name": "sqlite"
}
],
"default": "sqlite"
},
{
"label": "Database File Path",
"name": "dbFilePath",
"type": "string",
"placeholder": "C:/Users/chinook.db"
}
],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLLM",
"id": "sqlDatabaseChain_0-input-llm-BaseLLM"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"database": "sqlite",
"dbFilePath": ""
},
"outputAnchors": [
{
"id": "sqlDatabaseChain_0-output-sqlDatabaseChain-SqlDatabaseChain|BaseChain",
"name": "sqlDatabaseChain",
"label": "SqlDatabaseChain",
"type": "SqlDatabaseChain | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1271.2742585099204,
"y": 232.91561199714107
},
"dragging": false
},
{
"width": 300,
"height": 472,
"id": "openAI_0",
"position": {
"x": 867.8574087065126,
"y": 209.58625096303308
},
"type": "customNode",
"data": {
"id": "openAI_0",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": "0"
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 867.8574087065126,
"y": 209.58625096303308
},
"dragging": false
}
],
"edges": [
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "sqlDatabaseChain_0",
"targetHandle": "sqlDatabaseChain_0-input-llm-BaseLLM",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-sqlDatabaseChain_0-sqlDatabaseChain_0-input-llm-BaseLLM",
"data": {
"label": ""
}
}
]
}

View File

@ -285,3 +285,20 @@ export const getAvailableNodesForVariable = (nodes, edges, target, targetHandle)
}
return parentNodes
}
// Appends the new tool reference, then reorders the list so that
// RequestsGet comes first and RequestsPost second, ahead of every
// other tool. Mutates newValues in place.
export const rearrangeToolsOrdering = (newValues, sourceNodeId) => {
    newValues.push(`{{${sourceNodeId}.data.instance}}`)

    // Lower number = earlier position in the tools list.
    const priority = (item) => {
        if (item.includes('requestsGet')) return 0
        if (item.includes('requestsPost')) return 1
        return 2
    }

    newValues.sort((a, b) => priority(a) - priority(b))
}

View File

@ -38,7 +38,7 @@ import useConfirm from 'hooks/useConfirm'
import { IconX } from '@tabler/icons'
// utils
import { getUniqueNodeId, initNode, getEdgeLabelName } from 'utils/genericHelper'
import { getUniqueNodeId, initNode, getEdgeLabelName, rearrangeToolsOrdering } from 'utils/genericHelper'
import useNotifier from 'utils/useNotifier'
const nodeTypes = { customNode: CanvasNode }
@ -112,7 +112,11 @@ const Canvas = () => {
if (inputAnchor && inputAnchor.list) {
const newValues = node.data.inputs[targetInput] || []
newValues.push(`{{${sourceNodeId}.data.instance}}`)
if (targetInput === 'tools') {
rearrangeToolsOrdering(newValues, sourceNodeId)
} else {
newValues.push(`{{${sourceNodeId}.data.instance}}`)
}
value = newValues
} else if (inputParam && inputParam.acceptVariable) {
value = node.data.inputs[targetInput] || ''