diff --git a/package.json b/package.json
index bedc7b4f4..5698d6792 100644
--- a/package.json
+++ b/package.json
@@ -68,7 +68,7 @@
},
"resolutions": {
"@google/generative-ai": "^0.15.0",
- "@langchain/core": "0.3.18",
+ "@langchain/core": "0.3.29",
"@qdrant/openapi-typescript-fetch": "1.2.6",
"openai": "4.57.3",
"protobufjs": "7.4.0"
diff --git a/packages/components/nodes/embeddings/JinaAIEmbedding/JinaAIEmbedding.ts b/packages/components/nodes/embeddings/JinaAIEmbedding/JinaAIEmbedding.ts
index a85c4f88f..3e96cfb70 100644
--- a/packages/components/nodes/embeddings/JinaAIEmbedding/JinaAIEmbedding.ts
+++ b/packages/components/nodes/embeddings/JinaAIEmbedding/JinaAIEmbedding.ts
@@ -1,6 +1,6 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { JinaEmbeddings, JinaEmbeddingsParams } from '@langchain/community/embeddings/jina'
+import { JinaEmbeddings } from '@langchain/community/embeddings/jina'
class JinaAIEmbedding_Embeddings implements INode {
label: string
@@ -45,12 +45,11 @@ class JinaAIEmbedding_Embeddings implements INode {
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('jinaAIAPIKey', credentialData, nodeData)
- const obj: JinaEmbeddingsParams = {
+ const model = new JinaEmbeddings({
apiKey: apiKey,
model: modelName
- }
+ })
- const model = new JinaEmbeddings(obj)
return model
}
}
diff --git a/packages/components/package.json b/packages/components/package.json
index d4f8a2600..543e8f7e9 100644
--- a/packages/components/package.json
+++ b/packages/components/package.json
@@ -41,14 +41,14 @@
"@langchain/aws": "0.1.2",
"@langchain/baidu-qianfan": "^0.1.0",
"@langchain/cohere": "^0.0.7",
- "@langchain/community": "^0.3.11",
- "@langchain/core": "0.3.18",
+ "@langchain/community": "^0.3.24",
+ "@langchain/core": "0.3.29",
"@langchain/exa": "^0.0.5",
"@langchain/google-genai": "0.1.3",
"@langchain/google-vertexai": "^0.1.2",
"@langchain/groq": "0.1.2",
"@langchain/langgraph": "^0.0.22",
- "@langchain/mistralai": "^0.0.26",
+ "@langchain/mistralai": "^0.2.0",
"@langchain/mongodb": "^0.0.1",
"@langchain/ollama": "0.1.2",
"@langchain/openai": "0.3.13",
diff --git a/packages/components/src/followUpPrompts.ts b/packages/components/src/followUpPrompts.ts
index 73e0455c1..b6b6865f6 100644
--- a/packages/components/src/followUpPrompts.ts
+++ b/packages/components/src/followUpPrompts.ts
@@ -96,6 +96,7 @@ export const generateFollowUpPrompts = async (
model: providerConfig.modelName,
temperature: parseFloat(`${providerConfig.temperature}`)
})
+ // @ts-expect-error -- withStructuredOutput generic mismatch after @langchain/core 0.3.29 bump; remove once types align
const structuredLLM = model.withStructuredOutput(FollowUpPromptType)
const structuredResponse = await structuredLLM.invoke(followUpPromptsPrompt)
return structuredResponse
diff --git a/packages/ui/src/ui-component/extended/FollowUpPrompts.jsx b/packages/ui/src/ui-component/extended/FollowUpPrompts.jsx
index e4115cba1..44da9bc61 100644
--- a/packages/ui/src/ui-component/extended/FollowUpPrompts.jsx
+++ b/packages/ui/src/ui-component/extended/FollowUpPrompts.jsx
@@ -354,16 +354,20 @@ const FollowUpPrompts = ({ dialogProps }) => {
chatbotConfig.followUpPrompts = value.followUpPrompts
// if the prompt is not set, save the default prompt
- if (!followUpPromptsConfig[followUpPromptsConfig.selectedProvider].prompt) {
- followUpPromptsConfig[followUpPromptsConfig.selectedProvider].prompt = followUpPromptsOptions[
- followUpPromptsConfig.selectedProvider
- ].inputs.find((input) => input.name === 'prompt').default
- }
+ const selectedProvider = followUpPromptsConfig.selectedProvider
- if (!followUpPromptsConfig[followUpPromptsConfig.selectedProvider].temperature) {
- followUpPromptsConfig[followUpPromptsConfig.selectedProvider].temperature = followUpPromptsOptions[
- followUpPromptsConfig.selectedProvider
- ].inputs.find((input) => input.name === 'temperature').default
+ if (selectedProvider && followUpPromptsConfig[selectedProvider] && followUpPromptsOptions[selectedProvider]) {
+ if (!followUpPromptsConfig[selectedProvider].prompt) {
+ followUpPromptsConfig[selectedProvider].prompt = followUpPromptsOptions[selectedProvider].inputs.find(
+ (input) => input.name === 'prompt'
+ )?.default
+ }
+
+ if (!followUpPromptsConfig[selectedProvider].temperature) {
+ followUpPromptsConfig[selectedProvider].temperature = followUpPromptsOptions[selectedProvider].inputs.find(
+ (input) => input.name === 'temperature'
+ )?.default
+ }
}
const saveResp = await chatflowsApi.updateChatflow(dialogProps.chatflow.id, {
@@ -462,7 +466,6 @@ const FollowUpPrompts = ({ dialogProps }) => {
Providers