add openai function calling support

Henry 2023-06-14 01:05:37 +01:00
parent a9ec444514
commit 86ab30510c
20 changed files with 529 additions and 4 deletions

View File

@@ -0,0 +1,69 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'

class OpenAIFunctionAgent_Agents implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'OpenAI Function Agent'
        this.name = 'openAIFunctionAgent'
        this.type = 'AgentExecutor'
        this.category = 'Agents'
        this.icon = 'openai.png'
        this.description = `An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call`
        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
        this.inputs = [
            {
                label: 'Allowed Tools',
                name: 'tools',
                type: 'Tool',
                list: true
            },
            {
                label: 'OpenAI Chat Model',
                name: 'model',
                description:
                    'Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target="_blank" href="https://platform.openai.com/docs/guides/gpt/function-calling">docs</a> for more info',
                type: 'BaseChatModel'
            }
        ]
    }

    async init(nodeData: INodeData): Promise<any> {
        const model = nodeData.inputs?.model as BaseLanguageModel
        let tools = nodeData.inputs?.tools as Tool[]
        tools = flatten(tools)

        const executor = await initializeAgentExecutorWithOptions(tools, model, {
            agentType: 'openai-functions',
            verbose: process.env.DEBUG === 'true' ? true : false
        })
        return executor
    }

    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
        const executor = nodeData.instance as AgentExecutor

        if (options.socketIO && options.socketIOClientId) {
            const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
            const result = await executor.run(input, [handler])
            return result
        } else {
            const result = await executor.run(input)
            return result
        }
    }
}

module.exports = { nodeClass: OpenAIFunctionAgent_Agents }
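
For context, the init() above delegates to LangChain's openai-functions agent type, which passes the connected tools to the model as OpenAI functions and lets the model pick which one to call with which arguments. A minimal standalone sketch of the same setup (illustrative only, not part of this commit; assumes OPENAI_API_KEY is set and the code runs inside an async function):

import { initializeAgentExecutorWithOptions } from 'langchain/agents'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { Calculator } from 'langchain/tools/calculator'

// one of the -0613 snapshots is required for function calling
const model = new ChatOpenAI({ modelName: 'gpt-3.5-turbo-0613', temperature: 0 })
const executor = await initializeAgentExecutorWithOptions([new Calculator()], model, {
    agentType: 'openai-functions',
    verbose: true
})
const answer = await executor.run('What is 15% of 260?')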

Binary file not shown (new image, 3.9 KiB).

View File

@@ -43,6 +43,10 @@ class ChatOpenAI_ChatModels implements INode {
label: 'gpt-4-32k-0314',
name: 'gpt-4-32k-0314'
},
{
label: 'gpt-4-0613',
name: 'gpt-4-0613'
},
{
label: 'gpt-3.5-turbo',
name: 'gpt-3.5-turbo'
@@ -50,6 +54,10 @@ class ChatOpenAI_ChatModels implements INode {
{
label: 'gpt-3.5-turbo-0301',
name: 'gpt-3.5-turbo-0301'
},
{
label: 'gpt-3.5-turbo-0613',
name: 'gpt-3.5-turbo-0613'
}
],
default: 'gpt-3.5-turbo',
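
The two new -0613 snapshots are the model versions that expose OpenAI's function-calling API, which the new agent node depends on. Choosing one of them in the dropdown amounts to setting modelName on the LangChain wrapper, roughly as follows (illustrative sketch, not part of the diff):

import { ChatOpenAI } from 'langchain/chat_models/openai'

const model = new ChatOpenAI({ modelName: 'gpt-4-0613', temperature: 0.9 })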

View File

@@ -1,4 +1,4 @@
import { ZapierNLAWrapper, ZapiterNLAWrapperParams } from 'langchain/tools'
import { ZapierNLAWrapper, ZapierNLAWrapperParams } from 'langchain/tools'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { ZapierToolKit } from 'langchain/agents'
@@ -32,7 +32,7 @@ class ZapierNLA_Tools implements INode {
async init(nodeData: INodeData): Promise<any> {
const apiKey = nodeData.inputs?.apiKey as string
const obj: Partial<ZapiterNLAWrapperParams> = {
const obj: Partial<ZapierNLAWrapperParams> = {
apiKey
}
const zapier = new ZapierNLAWrapper(obj)

View File

@@ -32,7 +32,7 @@
"faiss-node": "^0.2.1",
"form-data": "^4.0.0",
"graphql": "^16.6.0",
"langchain": "^0.0.91",
"langchain": "^0.0.94",
"linkifyjs": "^4.1.1",
"mammoth": "^1.5.1",
"moment": "^2.29.3",

View File

@@ -286,6 +286,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -293,6 +297,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",
@@ -481,6 +489,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -488,6 +500,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",
@@ -620,6 +636,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -627,6 +647,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -132,6 +132,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -139,6 +143,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -235,6 +235,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -242,6 +246,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -295,6 +295,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -302,6 +306,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -42,6 +42,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -49,6 +53,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -356,6 +356,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -363,6 +367,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -373,6 +373,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -380,6 +384,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -302,6 +302,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -309,6 +313,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -299,6 +299,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -306,6 +310,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -0,0 +1,327 @@
{
"description": "An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call",
"nodes": [
{
"width": 300,
"height": 524,
"id": "chatOpenAI_0",
"position": {
"x": 373.8366297840716,
"y": 448.58765780622326
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain", "Serializable"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password",
"id": "chatOpenAI_0-input-openAIApiKey-password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0314",
"name": "gpt-4-0314"
},
{
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",
"optional": true,
"id": "chatOpenAI_0-input-modelName-options"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true,
"id": "chatOpenAI_0-input-temperature-number"
},
{
"label": "Max Tokens",
"name": "maxTokens",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-maxTokens-number"
},
{
"label": "Top Probability",
"name": "topP",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-topP-number"
},
{
"label": "Frequency Penalty",
"name": "frequencyPenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-frequencyPenalty-number"
},
{
"label": "Presence Penalty",
"name": "presencePenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-presencePenalty-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-basepath-string"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo-0613",
"temperature": 0.9,
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
"presencePenalty": "",
"timeout": "",
"basepath": ""
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain|Serializable",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain | Serializable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 373.8366297840716,
"y": 448.58765780622326
},
"dragging": false
},
{
"width": 300,
"height": 280,
"id": "openAIFunctionAgent_0",
"position": {
"x": 1084.5405852317417,
"y": 384.4653768834282
},
"type": "customNode",
"data": {
"id": "openAIFunctionAgent_0",
"label": "OpenAI Function Agent",
"name": "openAIFunctionAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain", "Serializable"],
"category": "Agents",
"description": "An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call",
"inputParams": [],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "openAIFunctionAgent_0-input-tools-Tool"
},
{
"label": "OpenAI Chat Model",
"name": "model",
"description": "Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target=\"_blank\" href=\"https://platform.openai.com/docs/guides/gpt/function-calling\">docs</a> for more info",
"type": "BaseChatModel",
"id": "openAIFunctionAgent_0-input-model-BaseChatModel"
}
],
"inputs": {
"tools": ["{{calculator_0.data.instance}}", "{{serper_0.data.instance}}"],
"model": "{{chatOpenAI_0.data.instance}}"
},
"outputAnchors": [
{
"id": "openAIFunctionAgent_0-output-openAIFunctionAgent-AgentExecutor|BaseChain|BaseLangChain|Serializable",
"name": "openAIFunctionAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain | BaseLangChain | Serializable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1084.5405852317417,
"y": 384.4653768834282
},
"dragging": false
},
{
"width": 300,
"height": 278,
"id": "serper_0",
"position": {
"x": 691.7580226065319,
"y": 34.00444633899792
},
"type": "customNode",
"data": {
"id": "serper_0",
"label": "Serper",
"name": "serper",
"type": "Serper",
"baseClasses": ["Serper", "Tool", "StructuredTool", "BaseLangChain", "Serializable"],
"category": "Tools",
"description": "Wrapper around Serper.dev - Google Search API",
"inputParams": [
{
"label": "Serper Api Key",
"name": "apiKey",
"type": "password",
"id": "serper_0-input-apiKey-password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "serper_0-output-serper-Serper|Tool|StructuredTool|BaseLangChain|Serializable",
"name": "serper",
"label": "Serper",
"type": "Serper | Tool | StructuredTool | BaseLangChain | Serializable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 691.7580226065319,
"y": 34.00444633899792
},
"dragging": false
},
{
"width": 300,
"height": 143,
"id": "calculator_0",
"position": {
"x": 341.63347110886497,
"y": 261.6753474034481
},
"type": "customNode",
"data": {
"id": "calculator_0",
"label": "Calculator",
"name": "calculator",
"type": "Calculator",
"baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain", "Serializable"],
"category": "Tools",
"description": "Perform calculations on response",
"inputParams": [],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "calculator_0-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain|Serializable",
"name": "calculator",
"label": "Calculator",
"type": "Calculator | Tool | StructuredTool | BaseLangChain | Serializable"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 341.63347110886497,
"y": 261.6753474034481
},
"dragging": false
}
],
"edges": [
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain|Serializable",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain|Serializable-openAIFunctionAgent_0-openAIFunctionAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "calculator_0",
"sourceHandle": "calculator_0-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain|Serializable",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "calculator_0-calculator_0-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain|Serializable-openAIFunctionAgent_0-openAIFunctionAgent_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "serper_0",
"sourceHandle": "serper_0-output-serper-Serper|Tool|StructuredTool|BaseLangChain|Serializable",
"target": "openAIFunctionAgent_0",
"targetHandle": "openAIFunctionAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "serper_0-serper_0-output-serper-Serper|Tool|StructuredTool|BaseLangChain|Serializable-openAIFunctionAgent_0-openAIFunctionAgent_0-input-tools-Tool",
"data": {
"label": ""
}
}
]
}
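
The template above connects ChatOpenAI (gpt-3.5-turbo-0613) to the agent's model input and feeds the Calculator and Serper tools into its tools list. Once the {{...data.instance}} references are resolved at runtime, the flow is roughly equivalent to this LangChain setup (an illustrative sketch inside an async function, assuming OPENAI_API_KEY and SERPER_API_KEY are set; not part of the commit):

import { initializeAgentExecutorWithOptions } from 'langchain/agents'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { Calculator } from 'langchain/tools/calculator'
import { Serper } from 'langchain/tools'

const model = new ChatOpenAI({ modelName: 'gpt-3.5-turbo-0613', temperature: 0.9 })
const tools = [new Calculator(), new Serper()] // Serper falls back to SERPER_API_KEY when no key is passed
const executor = await initializeAgentExecutorWithOptions(tools, model, {
    agentType: 'openai-functions'
})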

View File

@@ -42,6 +42,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -49,6 +53,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -112,6 +112,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -119,6 +123,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -42,6 +42,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -49,6 +53,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",
@@ -318,6 +326,10 @@
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
@@ -325,6 +337,10 @@
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
}
],
"default": "gpt-3.5-turbo",

View File

@@ -660,7 +660,7 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod
return (
isChatOrLLMsExist &&
endingNodeData.category === 'Chains' &&
(endingNodeData.category === 'Chains' || endingNodeData.name === 'openAIFunctionAgent') &&
!isVectorStoreFaiss(endingNodeData) &&
process.env.EXECUTION_MODE !== 'child'
)
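
In the fragment above, the parenthesised condition replaces the plain endingNodeData.category === 'Chains' line shown directly above it, so flows ending in the new openAIFunctionAgent node are also treated as streamable. A minimal sketch of the resulting predicate (names taken from the fragment; the surrounding helper is omitted):

// streaming is allowed when a chat model / LLM node exists, the ending node is a
// Chain or the new OpenAI Function Agent, the ending node is not a Faiss vector
// store, and predictions are not executed in a child process
const isStreamValid =
    isChatOrLLMsExist &&
    (endingNodeData.category === 'Chains' || endingNodeData.name === 'openAIFunctionAgent') &&
    !isVectorStoreFaiss(endingNodeData) &&
    process.env.EXECUTION_MODE !== 'child'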

View File

@@ -95,6 +95,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA
<Typography>
{inputAnchor.label}
{!inputAnchor.optional && <span style={{ color: 'red' }}>&nbsp;*</span>}
{inputAnchor.description && <TooltipWithParser style={{ marginLeft: 10 }} title={inputAnchor.description} />}
</Typography>
</Box>
</>