From 4c47beabbcaffba93a1c9c873cb59899531f0292 Mon Sep 17 00:00:00 2001
From: Henry
Date: Wed, 12 Jul 2023 01:11:02 +0100
Subject: [PATCH] update claude v2

---
 .../ConversationChain/ConversationChain.ts    | 26 ++++++++++++++++++-
 .../chatmodels/ChatAnthropic/ChatAnthropic.ts | 10 +++++++
 2 files changed, 35 insertions(+), 1 deletion(-)

diff --git a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts
index f1df0183b..7b6f002d4 100644
--- a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts
+++ b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts
@@ -6,8 +6,10 @@ import { BufferMemory, ChatMessageHistory } from 'langchain/memory'
 import { BaseChatModel } from 'langchain/chat_models/base'
 import { AIMessage, HumanMessage } from 'langchain/schema'
 import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
+import { flatten } from 'lodash'
+import { Document } from 'langchain/document'
 
-const systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`
+let systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`
 
 class ConversationChain_Chains implements INode {
     label: string
@@ -38,6 +40,14 @@ class ConversationChain_Chains implements INode {
                 name: 'memory',
                 type: 'BaseMemory'
             },
+            {
+                label: 'Document',
+                name: 'document',
+                type: 'Document',
+                description: 'Include whole document into the context window',
+                optional: true,
+                list: true
+            },
             {
                 label: 'System Message',
                 name: 'systemMessagePrompt',
@@ -54,6 +64,20 @@ class ConversationChain_Chains implements INode {
         const model = nodeData.inputs?.model as BaseChatModel
         const memory = nodeData.inputs?.memory as BufferMemory
         const prompt = nodeData.inputs?.systemMessagePrompt as string
+        const docs = nodeData.inputs?.document as Document[]
+
+        const flattenDocs = docs && docs.length ? flatten(docs) : []
+        const finalDocs = []
+        for (let i = 0; i < flattenDocs.length; i += 1) {
+            finalDocs.push(new Document(flattenDocs[i]))
+        }
+
+        let finalText = ''
+        for (let i = 0; i < finalDocs.length; i += 1) {
+            finalText += finalDocs[i].pageContent
+        }
+
+        if (finalText) systemMessage = `${systemMessage}\nThe AI has the following context:\n${finalText}`
 
         const obj: any = {
             llm: model,
diff --git a/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts b/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts
index 3d861d24e..b65c7bd89 100644
--- a/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts
+++ b/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts
@@ -31,6 +31,16 @@ class ChatAnthropic_ChatModels implements INode {
             name: 'modelName',
             type: 'options',
             options: [
+                {
+                    label: 'claude-2',
+                    name: 'claude-2',
+                    description: 'Claude 2 latest major version, automatically get updates to the model as they are released'
+                },
+                {
+                    label: 'claude-instant-1',
+                    name: 'claude-instant-1',
+                    description: 'Claude Instant latest major version, automatically get updates to the model as they are released'
+                },
                 {
                     label: 'claude-v1',
                     name: 'claude-v1'
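
As a reading aid, the document-stuffing behaviour the ConversationChain hunks introduce can be sketched in isolation as below. This is a minimal sketch, not Flowise code: the Doc interface and buildSystemMessage helper are hypothetical names for this illustration (the patch itself imports Document from 'langchain/document' and inlines the loops in init()); only lodash's flatten is assumed, as in the patch.

import { flatten } from 'lodash'

// Minimal document shape for this sketch; LangChain's Document also carries metadata.
interface Doc {
    pageContent: string
}

// Mirror of the added logic: flatten possibly nested document arrays and append
// their text to the base system prompt so the whole document sits in the context window.
function buildSystemMessage(base: string, docs?: (Doc | Doc[])[]): string {
    const flattenDocs: Doc[] = docs && docs.length ? flatten(docs) : []
    let finalText = ''
    for (let i = 0; i < flattenDocs.length; i += 1) {
        finalText += flattenDocs[i].pageContent
    }
    return finalText ? `${base}\nThe AI has the following context:\n${finalText}` : base
}

// Example usage (illustrative document content):
// buildSystemMessage(systemMessage, [{ pageContent: '...' }])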