Merge pull request #1451 from FlowiseAI/chore/Upgrade-LC-0.0.213

Chore/update langchain version
Henry Heng 2024-01-03 16:02:25 +00:00 committed by GitHub
commit f1f2f71fe8
7 changed files with 13 additions and 16 deletions

File 1/7: AzureChatOpenAI chat model node

@@ -1,7 +1,6 @@
-import { OpenAIBaseInput } from 'langchain/dist/types/openai-types'
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { AzureOpenAIInput, ChatOpenAI } from 'langchain/chat_models/openai'
+import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
 import { BaseCache } from 'langchain/schema'
 import { BaseLLMParams } from 'langchain/llms/base'
@@ -123,7 +122,7 @@ class AzureChatOpenAI_ChatModels implements INode {
         const azureOpenAIApiDeploymentName = getCredentialParam('azureOpenAIApiDeploymentName', credentialData, nodeData)
         const azureOpenAIApiVersion = getCredentialParam('azureOpenAIApiVersion', credentialData, nodeData)
-        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIBaseInput> = {
+        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIChatInput> = {
             temperature: parseFloat(temperature),
             modelName,
             azureOpenAIApiKey,
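
The net effect in this file: the deep langchain/dist type import is gone, and the params object is typed against OpenAIChatInput from the public langchain/chat_models/openai entry point. A minimal sketch of the resulting usage; the temperature, model name, and key source are placeholders, not taken from this diff:

    import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
    import { BaseLLMParams } from 'langchain/llms/base'

    // Same intersection type the node builds; field values are placeholders.
    const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIChatInput> = {
        temperature: 0.9,
        modelName: 'gpt-35-turbo',
        azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY
    }
    const model = new ChatOpenAI(obj)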

File 2/7: ChatMistral chat model node

@@ -124,13 +124,13 @@ class ChatMistral_ChatModels implements INode {
         const safeMode = nodeData.inputs?.safeMode as boolean
         const randomSeed = nodeData.inputs?.safeMode as string
         const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string
-        // Waiting fix from langchain + mistral to enable streaming - https://github.com/mistralai/client-js/issues/18
+        const streaming = nodeData.inputs?.streaming as boolean
         const cache = nodeData.inputs?.cache as BaseCache

         const obj: ChatMistralAIInput = {
             apiKey: apiKey,
-            modelName: modelName
+            modelName: modelName,
+            streaming: streaming ?? true
         }

         if (maxOutputTokens) obj.maxTokens = parseInt(maxOutputTokens, 10)
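
With the upstream Mistral client fixed, the node now reads a streaming input and forwards it to the client, falling back to true when unset. A minimal sketch of the resulting construction, assuming @langchain/mistralai ^0.0.6 as pinned below; the model name is a placeholder:

    import { ChatMistralAI } from '@langchain/mistralai'

    const model = new ChatMistralAI({
        apiKey: process.env.MISTRAL_API_KEY, // resolved from the node credential in Flowise
        modelName: 'mistral-tiny',           // placeholder
        streaming: true                      // new input; defaults to true when unset
    })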

File 3/7: ChatOllama chat model node

@@ -1,8 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
-import { ChatOllama } from 'langchain/chat_models/ollama'
+import { ChatOllama, ChatOllamaInput } from 'langchain/chat_models/ollama'
 import { BaseCache } from 'langchain/schema'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 import { BaseLLMParams } from 'langchain/llms/base'

 class ChatOllama_ChatModels implements INode {
@@ -209,7 +208,7 @@ class ChatOllama_ChatModels implements INode {
         const cache = nodeData.inputs?.cache as BaseCache

-        const obj: OllamaInput & BaseLLMParams = {
+        const obj: ChatOllamaInput & BaseLLMParams = {
             baseUrl,
             temperature: parseFloat(temperature),
             model: modelName

File 4/7: OllamaEmbedding embeddings node

@@ -1,7 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
+import { OllamaInput } from 'langchain/llms/ollama'
 import { OllamaEmbeddings } from 'langchain/embeddings/ollama'
-import { OllamaInput } from 'langchain/dist/util/ollama'

 class OllamaEmbedding_Embeddings implements INode {
     label: string

File 5/7: Ollama LLM node

@@ -1,8 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
-import { Ollama } from 'langchain/llms/ollama'
+import { Ollama, OllamaInput } from 'langchain/llms/ollama'
 import { BaseCache } from 'langchain/schema'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 import { BaseLLMParams } from 'langchain/llms/base'

 class Ollama_LLMs implements INode {
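
All three Ollama files make the same move: OllamaInput and ChatOllamaInput now come from the public langchain/llms/ollama and langchain/chat_models/ollama entry points rather than the internal langchain/dist/util/ollama path, which is compiled build output and can shift between releases. Side by side:

    // Before: reaches into langchain's compiled output
    // import { OllamaInput } from 'langchain/dist/util/ollama'

    // After: public entry points, stable across upgrades
    import { Ollama, OllamaInput } from 'langchain/llms/ollama'
    import { ChatOllama, ChatOllamaInput } from 'langchain/chat_models/ollama'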

File 6/7: package.json

@@ -26,8 +26,8 @@
         "@gomomento/sdk-core": "^1.51.1",
         "@google-ai/generativelanguage": "^0.2.1",
         "@huggingface/inference": "^2.6.1",
-        "@langchain/google-genai": "^0.0.3",
-        "@langchain/mistralai": "^0.0.3",
+        "@langchain/google-genai": "^0.0.6",
+        "@langchain/mistralai": "^0.0.6",
         "@notionhq/client": "^2.2.8",
         "@opensearch-project/opensearch": "^1.2.0",
         "@pinecone-database/pinecone": "^1.1.1",
@@ -52,7 +52,7 @@
         "html-to-text": "^9.0.5",
         "husky": "^8.0.3",
         "ioredis": "^5.3.2",
-        "langchain": "^0.0.196",
+        "langchain": "^0.0.213",
         "langfuse": "^1.2.0",
         "langfuse-langchain": "^1.0.31",
         "langsmith": "^0.0.49",

File 7/7: utils (isFlowValidForStream)

@@ -818,7 +818,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component
 */
 export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNodeData: INodeData) => {
     const streamAvailableLLMs = {
-        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock'],
+        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock', 'chatMistralAI'],
         LLMs: ['azureOpenAI', 'openAI', 'ollama']
     }