Chore: Update @langchain/community (#3787)
* update package versions
* fix updated field defs due to lib update
* Merge branch 'main' into chore/Upgrade-LC-version
  # Conflicts:
  #   packages/components/package.json
  #   pnpm-lock.yaml
* lintfix
* fix follow up prompt dialog
* lintfix

Co-authored-by: Henry <hzj94@hotmail.com>
This commit is contained in:
parent 16aa3a0d29
commit cc87d85675
@@ -68,7 +68,7 @@
     },
     "resolutions": {
         "@google/generative-ai": "^0.15.0",
-        "@langchain/core": "0.3.18",
+        "@langchain/core": "0.3.29",
         "@qdrant/openapi-typescript-fetch": "1.2.6",
         "openai": "4.57.3",
         "protobufjs": "7.4.0"
@@ -1,6 +1,6 @@
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { JinaEmbeddings, JinaEmbeddingsParams } from '@langchain/community/embeddings/jina'
+import { JinaEmbeddings } from '@langchain/community/embeddings/jina'
 
 class JinaAIEmbedding_Embeddings implements INode {
     label: string
@@ -45,12 +45,11 @@ class JinaAIEmbedding_Embeddings implements INode {
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const apiKey = getCredentialParam('jinaAIAPIKey', credentialData, nodeData)
 
-        const obj: JinaEmbeddingsParams = {
+        const model = new JinaEmbeddings({
             apiKey: apiKey,
             model: modelName
-        }
+        })
 
-        const model = new JinaEmbeddings(obj)
         return model
     }
 }
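The JinaEmbeddingsParams import is gone because the options are no longer typed as a separate object; the node now passes them inline to the constructor. A minimal standalone sketch of the updated usage (not Flowise code; the env var and model name below are illustrative):

import { JinaEmbeddings } from '@langchain/community/embeddings/jina'

// Assumes a valid Jina AI API key; JINAAI_API_KEY is an illustrative env var name
const embeddings = new JinaEmbeddings({
    apiKey: process.env.JINAAI_API_KEY,
    model: 'jina-embeddings-v2-base-en' // example model name
})

const vectors = await embeddings.embedDocuments(['Flowise follow-up prompts'])
console.log(vectors[0].length) // prints the embedding dimension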
@@ -41,14 +41,14 @@
         "@langchain/aws": "0.1.2",
         "@langchain/baidu-qianfan": "^0.1.0",
         "@langchain/cohere": "^0.0.7",
-        "@langchain/community": "^0.3.11",
-        "@langchain/core": "0.3.18",
+        "@langchain/community": "^0.3.24",
+        "@langchain/core": "0.3.29",
         "@langchain/exa": "^0.0.5",
         "@langchain/google-genai": "0.1.3",
         "@langchain/google-vertexai": "^0.1.2",
         "@langchain/groq": "0.1.2",
         "@langchain/langgraph": "^0.0.22",
-        "@langchain/mistralai": "^0.0.26",
+        "@langchain/mistralai": "^0.2.0",
         "@langchain/mongodb": "^0.0.1",
         "@langchain/ollama": "0.1.2",
         "@langchain/openai": "0.3.13",
@@ -96,6 +96,7 @@ export const generateFollowUpPrompts = async (
                 model: providerConfig.modelName,
                 temperature: parseFloat(`${providerConfig.temperature}`)
             })
+            // @ts-ignore
             const structuredLLM = model.withStructuredOutput(FollowUpPromptType)
             const structuredResponse = await structuredLLM.invoke(followUpPromptsPrompt)
             return structuredResponse
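The added // @ts-ignore presumably silences a compile error rather than changing behavior: model here can be one of several chat-model classes depending on the configured provider, and TypeScript struggles to pick a common withStructuredOutput overload across that union after the upgrade. A hedged sketch of the underlying pattern with a single concrete model (the schema is an illustrative stand-in for the real FollowUpPromptType):

import { z } from 'zod'
import { ChatOpenAI } from '@langchain/openai'

// Illustrative stand-in for the real FollowUpPromptType schema
const FollowUpPromptType = z.object({
    questions: z.array(z.string()).describe('Suggested follow-up questions')
})

const model = new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 })

// withStructuredOutput makes invoke() return an object matching the schema
const structuredLLM = model.withStructuredOutput(FollowUpPromptType)
const structuredResponse = await structuredLLM.invoke('The user asked how to deploy Flowise.')
console.log(structuredResponse.questions)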
@@ -354,16 +354,20 @@ const FollowUpPrompts = ({ dialogProps }) => {
             chatbotConfig.followUpPrompts = value.followUpPrompts
 
             // if the prompt is not set, save the default prompt
-            if (!followUpPromptsConfig[followUpPromptsConfig.selectedProvider].prompt) {
-                followUpPromptsConfig[followUpPromptsConfig.selectedProvider].prompt = followUpPromptsOptions[
-                    followUpPromptsConfig.selectedProvider
-                ].inputs.find((input) => input.name === 'prompt').default
-            }
+            const selectedProvider = followUpPromptsConfig.selectedProvider
+
+            if (selectedProvider && followUpPromptsConfig[selectedProvider] && followUpPromptsOptions[selectedProvider]) {
+                if (!followUpPromptsConfig[selectedProvider].prompt) {
+                    followUpPromptsConfig[selectedProvider].prompt = followUpPromptsOptions[selectedProvider].inputs.find(
+                        (input) => input.name === 'prompt'
+                    )?.default
+                }
 
-            if (!followUpPromptsConfig[followUpPromptsConfig.selectedProvider].temperature) {
-                followUpPromptsConfig[followUpPromptsConfig.selectedProvider].temperature = followUpPromptsOptions[
-                    followUpPromptsConfig.selectedProvider
-                ].inputs.find((input) => input.name === 'temperature').default
-            }
+                if (!followUpPromptsConfig[selectedProvider].temperature) {
+                    followUpPromptsConfig[selectedProvider].temperature = followUpPromptsOptions[selectedProvider].inputs.find(
+                        (input) => input.name === 'temperature'
+                    )?.default
+                }
+            }
 
             const saveResp = await chatflowsApi.updateChatflow(dialogProps.chatflow.id, {
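The rewritten handler reads selectedProvider once, checks that both the saved config and the provider options actually contain that key, and switches to optional chaining ( ?.default ), so a provider without a saved entry or without a prompt/temperature input no longer throws. A small sketch of the crash the guard prevents (names and values are illustrative):

// Illustrative config: a provider is selected but has no saved entry yet
const followUpPromptsConfig: any = { selectedProvider: 'chatOpenAI' }

// Before the fix, this dereference threw at runtime:
// followUpPromptsConfig[followUpPromptsConfig.selectedProvider].prompt  // TypeError

// After the fix: read the key once and guard before dereferencing
const selectedProvider = followUpPromptsConfig.selectedProvider
if (selectedProvider && followUpPromptsConfig[selectedProvider]) {
    followUpPromptsConfig[selectedProvider].prompt ??= 'default prompt'
}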
@@ -462,7 +466,6 @@ const FollowUpPrompts = ({ dialogProps }) => {
                     <Typography variant='h5'>Providers</Typography>
                     <FormControl fullWidth>
                         <Select size='small' value={selectedProvider} onChange={handleSelectedProviderChange}>
-                            <MenuItem value='none'>None</MenuItem>
                             {Object.values(followUpPromptsOptions).map((provider) => (
                                 <MenuItem key={provider.name} value={provider.name}>
                                     {provider.label}
pnpm-lock.yaml: 678 changed lines (file diff suppressed because one or more lines are too long)