Merge pull request #524 from FlowiseAI/feature/ChatAnthropic
Feature/Update claude v2
commit 1d88ac47b9
@@ -6,8 +6,10 @@ import { BufferMemory, ChatMessageHistory } from 'langchain/memory'
 import { BaseChatModel } from 'langchain/chat_models/base'
 import { AIMessage, HumanMessage } from 'langchain/schema'
 import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
+import { flatten } from 'lodash'
+import { Document } from 'langchain/document'
 
-const systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`
+let systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`
 
 class ConversationChain_Chains implements INode {
     label: string
@@ -38,6 +40,14 @@ class ConversationChain_Chains implements INode {
                 name: 'memory',
                 type: 'BaseMemory'
             },
+            {
+                label: 'Document',
+                name: 'document',
+                type: 'Document',
+                description: 'Include whole document into the context window',
+                optional: true,
+                list: true
+            },
             {
                 label: 'System Message',
                 name: 'systemMessagePrompt',
@@ -54,6 +64,20 @@
         const model = nodeData.inputs?.model as BaseChatModel
         const memory = nodeData.inputs?.memory as BufferMemory
         const prompt = nodeData.inputs?.systemMessagePrompt as string
+        const docs = nodeData.inputs?.document as Document[]
+
+        const flattenDocs = docs && docs.length ? flatten(docs) : []
+        const finalDocs = []
+        for (let i = 0; i < flattenDocs.length; i += 1) {
+            finalDocs.push(new Document(flattenDocs[i]))
+        }
+
+        let finalText = ''
+        for (let i = 0; i < finalDocs.length; i += 1) {
+            finalText += finalDocs[i].pageContent
+        }
+
+        if (finalText) systemMessage = `${systemMessage}\nThe AI has the following context:\n${finalText}`
 
         const obj: any = {
             llm: model,
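For readers skimming the hunk above: the added logic flattens whatever Documents are connected to the node and appends their pageContent to the system message before the chain is built. Below is a minimal standalone sketch of that behaviour; the buildSystemMessage helper and the sample document are hypothetical and exist only for illustration.

```typescript
import { flatten } from 'lodash'
import { Document } from 'langchain/document'

// Hypothetical helper mirroring the logic added to ConversationChain_Chains:
// flatten the (possibly nested) document list, re-wrap each entry as a
// Document, concatenate the page contents, and append them to the prompt.
const buildSystemMessage = (baseMessage: string, docs?: Document[]): string => {
    const flattenDocs = docs && docs.length ? flatten(docs) : []
    const finalDocs: Document[] = []
    for (let i = 0; i < flattenDocs.length; i += 1) {
        finalDocs.push(new Document(flattenDocs[i]))
    }

    let finalText = ''
    for (let i = 0; i < finalDocs.length; i += 1) {
        finalText += finalDocs[i].pageContent
    }

    return finalText ? `${baseMessage}\nThe AI has the following context:\n${finalText}` : baseMessage
}

// Example usage with a made-up document
const docs = [new Document({ pageContent: 'Flowise is a low-code builder for LLM apps.' })]
console.log(buildSystemMessage('The following is a friendly conversation between a human and an AI.', docs))
```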
@@ -31,6 +31,16 @@ class ChatAnthropic_ChatModels implements INode {
             name: 'modelName',
             type: 'options',
             options: [
+                {
+                    label: 'claude-2',
+                    name: 'claude-2',
+                    description: 'Claude 2 latest major version, automatically get updates to the model as they are released'
+                },
+                {
+                    label: 'claude-instant-1',
+                    name: 'claude-instant-1',
+                    description: 'Claude Instant latest major version, automatically get updates to the model as they are released'
+                },
                 {
                     label: 'claude-v1',
                     name: 'claude-v1'
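The two new dropdown entries expose Anthropic's rolling aliases: claude-2 and claude-instant-1 track the latest Claude 2 and Claude Instant releases respectively, so flows pick up model updates without reconfiguration. A minimal sketch of how a selected value would typically be handed to LangChain's ChatAnthropic wrapper follows; the temperature and API key are placeholders, and the constructor fields are assumed from the langchain JS API of this period.

```typescript
import { ChatAnthropic } from 'langchain/chat_models/anthropic'
import { HumanMessage } from 'langchain/schema'

const main = async () => {
    // 'claude-2' is the alias added above; swap in 'claude-instant-1' for the
    // faster, cheaper tier. Both aliases follow the latest release of their line.
    const model = new ChatAnthropic({
        modelName: 'claude-2', // value chosen in the node's Model Name dropdown
        temperature: 0.9, // placeholder
        anthropicApiKey: process.env.ANTHROPIC_API_KEY // placeholder credential
    })

    const res = await model.call([new HumanMessage('In one sentence, what is Claude 2?')])
    console.log(res.content)
}

main()
```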
@@ -249,6 +249,15 @@
             "name": "memory",
             "type": "BaseMemory",
             "id": "conversationChain_0-input-memory-BaseMemory"
-          }
+          },
+          {
+            "label": "Document",
+            "name": "document",
+            "type": "Document",
+            "description": "Include whole document into the context window",
+            "optional": true,
+            "list": true,
+            "id": "conversationChain_0-input-document-Document"
+          }
         ],
         "inputs": {