From 7952ef8df5976d7474d6755326b58e8a551e85d9 Mon Sep 17 00:00:00 2001
From: Henry
Date: Tue, 19 Dec 2023 01:31:09 +0000
Subject: [PATCH] fix bedrock model empty bug

---
 .../components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts    | 2 +-
 .../nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts | 2 +-
 packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts         | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts
index 29faf5241..651d4136e 100644
--- a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts
+++ b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts
@@ -143,7 +143,7 @@ class AWSChatBedrock_ChatModels implements INode {
 
         const obj: BaseBedrockInput & BaseChatModelParams = {
             region: iRegion,
-            model: customModel ?? iModel,
+            model: customModel ? customModel : iModel,
             maxTokens: parseInt(iMax_tokens_to_sample, 10),
             temperature: parseFloat(iTemperature),
             streaming: streaming ?? true
diff --git a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts
index 5f7ce17c6..af93d08c4 100644
--- a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts
+++ b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts
@@ -103,7 +103,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
         const customModel = nodeData.inputs?.customModel as string
 
         const obj: BedrockEmbeddingsParams = {
-            model: customModel ?? iModel,
+            model: customModel ? customModel : iModel,
             region: iRegion
         }
 
diff --git a/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts b/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts
index 459c42964..00f32b983 100644
--- a/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts
+++ b/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts
@@ -143,7 +143,7 @@ class AWSBedrock_LLMs implements INode {
         const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string
         const cache = nodeData.inputs?.cache as BaseCache
         const obj: Partial<BaseBedrockInput> & BaseLLMParams = {
-            model: customModel ?? iModel,
+            model: customModel ? customModel : iModel,
             region: iRegion,
             temperature: parseFloat(iTemperature),
             maxTokens: parseInt(iMax_tokens_to_sample, 10)