Merge pull request #1398 from FlowiseAI/feature/BedrockEmbeddings

Feature/Update bedrock
This commit is contained in:
Henry Heng 2023-12-15 19:03:09 +00:00 committed by GitHub
commit 08ddd83b80
3 changed files with 39 additions and 11 deletions

View File

@@ -1,9 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { ChatBedrock } from 'langchain/chat_models/bedrock'
+import { BedrockChat } from 'langchain/chat_models/bedrock'
import { BaseBedrockInput } from 'langchain/dist/util/bedrock'
import { BaseCache } from 'langchain/schema'
-import { BaseLLMParams } from 'langchain/llms/base'
+import { BaseChatModelParams } from 'langchain/chat_models/base'
/**
* I had to run the following to build the component
@@ -25,14 +25,14 @@ class AWSChatBedrock_ChatModels implements INode {
inputs: INodeParams[]
constructor() {
-this.label = 'AWS Bedrock'
+this.label = 'AWS ChatBedrock'
this.name = 'awsChatBedrock'
this.version = 3.0
this.type = 'AWSChatBedrock'
this.icon = 'awsBedrock.png'
this.category = 'Chat Models'
this.description = 'Wrapper around AWS Bedrock large language models that use the Chat endpoint'
-this.baseClasses = [this.type, ...getBaseClasses(ChatBedrock)]
+this.baseClasses = [this.type, ...getBaseClasses(BedrockChat)]
this.credential = {
label: 'AWS Credential',
name: 'credential',
@@ -102,6 +102,13 @@ class AWSChatBedrock_ChatModels implements INode {
],
default: 'anthropic.claude-v2'
},
+{
+label: 'Custom Model Name',
+name: 'customModel',
+description: 'If provided, will override model selected from Model Name option',
+type: 'string',
+optional: true
+},
{
label: 'Temperature',
name: 'temperature',
@@ -109,6 +116,7 @@ class AWSChatBedrock_ChatModels implements INode {
step: 0.1,
description: 'Temperature parameter may not apply to certain models. Please check available model parameters',
optional: true,
+additionalParams: true,
default: 0.7
},
{
@@ -118,6 +126,7 @@ class AWSChatBedrock_ChatModels implements INode {
step: 10,
description: 'Max Tokens parameter may not apply to certain models. Please check available model parameters',
optional: true,
+additionalParams: true,
default: 200
}
]
@@ -126,14 +135,15 @@ class AWSChatBedrock_ChatModels implements INode {
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const iRegion = nodeData.inputs?.region as string
const iModel = nodeData.inputs?.model as string
+const customModel = nodeData.inputs?.customModel as string
const iTemperature = nodeData.inputs?.temperature as string
const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string
const cache = nodeData.inputs?.cache as BaseCache
const streaming = nodeData.inputs?.streaming as boolean
-const obj: BaseBedrockInput & BaseLLMParams = {
+const obj: BaseBedrockInput & BaseChatModelParams = {
region: iRegion,
-model: iModel,
+model: customModel ?? iModel,
maxTokens: parseInt(iMax_tokens_to_sample, 10),
temperature: parseFloat(iTemperature),
streaming: streaming ?? true
@@ -160,7 +170,7 @@ class AWSChatBedrock_ChatModels implements INode {
}
if (cache) obj.cache = cache
-const amazonBedrock = new ChatBedrock(obj)
+const amazonBedrock = new BedrockChat(obj)
return amazonBedrock
}
}
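
The diff above swaps ChatBedrock for BedrockChat (langchain renamed the class) and widens the params type from BaseLLMParams to BaseChatModelParams to match the chat-model base class. A minimal sketch of the renamed class in use, outside the node wrapper — the model ID, region, and prompt are illustrative, and AWS credentials are assumed to come from the standard environment variables:

import { BedrockChat } from 'langchain/chat_models/bedrock'
import { HumanMessage } from 'langchain/schema'

// Same shape the node builds above: BaseBedrockInput & BaseChatModelParams
const chat = new BedrockChat({
    model: 'anthropic.claude-v2', // a filled-in Custom Model Name takes precedence
    region: 'us-east-1',
    temperature: 0.7,
    maxTokens: 200,
    streaming: true
})

const res = await chat.call([new HumanMessage('Say hello from Bedrock')])
console.log(res.content)

One semantic note on the override: customModel ?? iModel falls back to the dropdown selection only when customModel is null or undefined; nullish coalescing does not treat an empty string as "not provided".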

View File

@@ -18,7 +18,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
constructor() {
this.label = 'AWS Bedrock Embeddings'
this.name = 'AWSBedrockEmbeddings'
-this.version = 2.0
+this.version = 3.0
this.type = 'AWSBedrockEmbeddings'
this.icon = 'awsBedrock.png'
this.category = 'Embeddings'
@@ -86,6 +86,13 @@ class AWSBedrockEmbedding_Embeddings implements INode {
{ label: 'cohere.embed-multilingual-v3', name: 'cohere.embed-multilingual-v3' }
],
default: 'amazon.titan-embed-text-v1'
},
+{
+label: 'Custom Model Name',
+name: 'customModel',
+description: 'If provided, will override model selected from Model Name option',
+type: 'string',
+optional: true
+}
]
}
@@ -93,9 +100,10 @@ class AWSBedrockEmbedding_Embeddings implements INode {
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const iRegion = nodeData.inputs?.region as string
const iModel = nodeData.inputs?.model as string
+const customModel = nodeData.inputs?.customModel as string
const obj: BedrockEmbeddingsParams = {
-model: iModel,
+model: customModel ?? iModel,
region: iRegion
}
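
The embeddings node applies the same customModel ?? iModel fallback when building its BedrockEmbeddingsParams. A minimal sketch of the resulting object in use, assuming langchain's embeddings/bedrock entry point — model ID and region are illustrative:

import { BedrockEmbeddings } from 'langchain/embeddings/bedrock'

const embeddings = new BedrockEmbeddings({
    model: 'amazon.titan-embed-text-v1', // or the Custom Model Name, when provided
    region: 'us-east-1'
})

const vector = await embeddings.embedQuery('hello world')
console.log(vector.length) // dimensionality depends on the model, e.g. 1536 for Titan v1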

View File

@@ -27,7 +27,7 @@ class AWSBedrock_LLMs implements INode {
constructor() {
this.label = 'AWS Bedrock'
this.name = 'awsBedrock'
-this.version = 2.0
+this.version = 3.0
this.type = 'AWSBedrock'
this.icon = 'awsBedrock.png'
this.category = 'LLMs'
@@ -105,6 +105,13 @@ class AWSBedrock_LLMs implements INode {
{ label: 'ai21.j2-ultra', name: 'ai21.j2-ultra' }
]
},
+{
+label: 'Custom Model Name',
+name: 'customModel',
+description: 'If provided, will override model selected from Model Name option',
+type: 'string',
+optional: true
+},
{
label: 'Temperature',
name: 'temperature',
@@ -112,6 +119,7 @@ class AWSBedrock_LLMs implements INode {
step: 0.1,
description: 'Temperature parameter may not apply to certain models. Please check available model parameters',
optional: true,
+additionalParams: true,
default: 0.7
},
{
@@ -121,6 +129,7 @@ class AWSBedrock_LLMs implements INode {
step: 10,
description: 'Max Tokens parameter may not apply to certain models. Please check available model parameters',
optional: true,
+additionalParams: true,
default: 200
}
]
@@ -129,11 +138,12 @@ class AWSBedrock_LLMs implements INode {
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const iRegion = nodeData.inputs?.region as string
const iModel = nodeData.inputs?.model as string
+const customModel = nodeData.inputs?.customModel as string
const iTemperature = nodeData.inputs?.temperature as string
const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string
const cache = nodeData.inputs?.cache as BaseCache
const obj: Partial<BaseBedrockInput> & BaseLLMParams = {
-model: iModel,
+model: customModel ?? iModel,
region: iRegion,
temperature: parseFloat(iTemperature),
maxTokens: parseInt(iMax_tokens_to_sample, 10)
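
The completion-style node keeps BaseLLMParams — it wraps langchain's Bedrock LLM class rather than the chat variant — and gains the same custom-model override. A minimal sketch, assuming langchain's llms/bedrock entry point; model ID, region, and prompt are illustrative:

import { Bedrock } from 'langchain/llms/bedrock'

const llm = new Bedrock({
    model: 'ai21.j2-ultra', // customModel ?? iModel resolves to the override when one is set
    region: 'us-east-1',
    temperature: 0.7,
    maxTokens: 200
})

const text = await llm.call('Write one sentence about AWS Bedrock')
console.log(text)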