Merge pull request #1691 from FlowiseAI/chore/Upgrade-LC-version

Chore/upgrade lc version
This commit is contained in:
Henry Heng 2024-02-19 19:52:24 +08:00 committed by GitHub
commit 13c97d2f59
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
181 changed files with 917 additions and 750 deletions

View File

@@ -1,6 +1,6 @@
import { INodeParams, INodeCredential } from '../src/Interface'
class LLMonitorApi implements INodeCredential {
class LunaryApi implements INodeCredential {
label: string
name: string
version: number
@@ -8,25 +8,25 @@ class LLMonitorApi implements INodeCredential {
inputs: INodeParams[]
constructor() {
this.label = 'LLMonitor API'
this.name = 'llmonitorApi'
this.label = 'Lunary API'
this.name = 'lunaryApi'
this.version = 1.0
this.description = 'Refer to <a target="_blank" href="https://llmonitor.com/docs">official guide</a> to get APP ID'
this.description = 'Refer to <a target="_blank" href="https://lunary.ai/docs">official guide</a> to get APP ID'
this.inputs = [
{
label: 'APP ID',
name: 'llmonitorAppId',
name: 'lunaryAppId',
type: 'password',
placeholder: '<LLMonitor_APP_ID>'
placeholder: '<Lunary_APP_ID>'
},
{
label: 'Endpoint',
name: 'llmonitorEndpoint',
name: 'lunaryEndpoint',
type: 'string',
default: 'https://app.llmonitor.com'
default: 'https://app.lunary.ai'
}
]
}
}
module.exports = { credClass: LLMonitorApi }
module.exports = { credClass: LunaryApi }

View File

@@ -1,11 +1,11 @@
import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
import { AgentExecutor } from 'langchain/agents'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { LoadPyodide, finalSystemPrompt, systemPrompt } from './core'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import axios from 'axios'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { AgentExecutor } from 'langchain/agents'
import { LLMChain } from 'langchain/chains'
import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { LoadPyodide, finalSystemPrompt, systemPrompt } from './core'
class Airtable_Agents implements INode {
label: string

View File

@@ -1,13 +1,12 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { BaseChatModel } from 'langchain/chat_models/base'
import { AutoGPT } from 'langchain/experimental/autogpt'
import { Tool } from 'langchain/tools'
import { AIMessage, HumanMessage, SystemMessage } from 'langchain/schema'
import { VectorStoreRetriever } from 'langchain/vectorstores/base'
import { flatten } from 'lodash'
import { StructuredTool } from 'langchain/tools'
import { Tool, StructuredTool } from '@langchain/core/tools'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { AIMessage, HumanMessage, SystemMessage } from '@langchain/core/messages'
import { VectorStoreRetriever } from '@langchain/core/vectorstores'
import { PromptTemplate } from '@langchain/core/prompts'
import { AutoGPT } from 'langchain/experimental/autogpt'
import { LLMChain } from 'langchain/chains'
import { PromptTemplate } from 'langchain/prompts'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
type ObjectTool = StructuredTool
const FINISH_NAME = 'finish'

View File

@@ -1,7 +1,7 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { VectorStore } from '@langchain/core/vectorstores'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { BabyAGI } from './core'
import { BaseChatModel } from 'langchain/chat_models/base'
import { VectorStore } from 'langchain/vectorstores/base'
class BabyAGI_Agents implements INode {
label: string

View File

@@ -1,8 +1,8 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { VectorStore } from '@langchain/core/vectorstores'
import { Document } from '@langchain/core/documents'
import { PromptTemplate } from '@langchain/core/prompts'
import { LLMChain } from 'langchain/chains'
import { BaseChatModel } from 'langchain/chat_models/base'
import { VectorStore } from 'langchain/dist/vectorstores/base'
import { Document } from 'langchain/document'
import { PromptTemplate } from 'langchain/prompts'
class TaskCreationChain extends LLMChain {
constructor(prompt: PromptTemplate, llm: BaseChatModel) {

View File

@@ -1,10 +1,10 @@
import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { AgentExecutor } from 'langchain/agents'
import { LLMChain } from 'langchain/chains'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { LoadPyodide, finalSystemPrompt, systemPrompt } from './core'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
class CSV_Agents implements INode {
label: string

View File

@@ -1,14 +1,16 @@
import { Tool } from 'langchain/tools'
import { BaseChatModel } from 'langchain/chat_models/base'
import { flatten } from 'lodash'
import { AgentStep, BaseMessage, ChainValues, AIMessage, HumanMessage } from 'langchain/schema'
import { RunnableSequence } from 'langchain/schema/runnable'
import { Tool } from '@langchain/core/tools'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'
import { ChainValues } from '@langchain/core/utils/types'
import { AgentStep } from '@langchain/core/agents'
import { renderTemplate } from '@langchain/core/prompts'
import { RunnableSequence } from '@langchain/core/runnables'
import { ChatConversationalAgent } from 'langchain/agents'
import { getBaseClasses } from '../../../src/utils'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { AgentExecutor } from '../../../src/agents'
import { ChatConversationalAgent } from 'langchain/agents'
import { renderTemplate } from '@langchain/core/prompts'
const DEFAULT_PREFIX = `Assistant is a large language model trained by OpenAI.

View File

@@ -1,13 +1,14 @@
import { ChainValues, AgentStep, BaseMessage } from 'langchain/schema'
import { flatten } from 'lodash'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { ChatPromptTemplate, MessagesPlaceholder } from 'langchain/prompts'
import { formatToOpenAIFunction } from 'langchain/tools'
import { RunnableSequence } from 'langchain/schema/runnable'
import { BaseMessage } from '@langchain/core/messages'
import { ChainValues } from '@langchain/core/utils/types'
import { AgentStep } from '@langchain/core/agents'
import { RunnableSequence } from '@langchain/core/runnables'
import { ChatOpenAI, formatToOpenAIFunction } from '@langchain/openai'
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
const defaultMessage = `Do your best to answer the questions. Feel free to use any tools available to look up relevant information, only if necessary.`

View File

@@ -1,10 +1,12 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { getBaseClasses } from '../../../src/utils'
import { Tool } from 'langchain/tools'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
import { AgentExecutor, createReactAgent } from 'langchain/agents'
import { pull } from 'langchain/hub'
import { Tool } from '@langchain/core/tools'
import type { PromptTemplate } from '@langchain/core/prompts'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { additionalCallbacks } from '../../../src/handler'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
class MRKLAgentChat_Agents implements INode {
label: string
@@ -20,7 +22,7 @@ class MRKLAgentChat_Agents implements INode {
constructor() {
this.label = 'ReAct Agent for Chat Models'
this.name = 'mrklAgentChat'
this.version = 1.0
this.version = 2.0
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'agent.svg'
@@ -34,30 +36,42 @@ class MRKLAgentChat_Agents implements INode {
list: true
},
{
label: 'Language Model',
label: 'Chat Model',
name: 'model',
type: 'BaseLanguageModel'
type: 'BaseChatModel'
}
]
}
async init(nodeData: INodeData): Promise<any> {
const model = nodeData.inputs?.model as BaseLanguageModel
let tools = nodeData.inputs?.tools as Tool[]
tools = flatten(tools)
const executor = await initializeAgentExecutorWithOptions(tools, model, {
agentType: 'chat-zero-shot-react-description',
verbose: process.env.DEBUG === 'true' ? true : false
})
return executor
async init(): Promise<any> {
return null
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const executor = nodeData.instance as AgentExecutor
const model = nodeData.inputs?.model as BaseChatModel
let tools = nodeData.inputs?.tools as Tool[]
tools = flatten(tools)
const promptWithChat = await pull<PromptTemplate>('hwchase17/react-chat')
const agent = await createReactAgent({
llm: model,
tools,
prompt: promptWithChat
})
const executor = new AgentExecutor({
agent,
tools,
verbose: process.env.DEBUG === 'true' ? true : false
})
const callbacks = await additionalCallbacks(nodeData, options)
const result = await executor.call({ input }, [...callbacks])
const result = await executor.invoke({
input,
callbacks
})
return result?.output
}

View File

@@ -1,10 +1,12 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
import { AgentExecutor, createReactAgent } from 'langchain/agents'
import { pull } from 'langchain/hub'
import { Tool } from '@langchain/core/tools'
import type { PromptTemplate } from '@langchain/core/prompts'
import { BaseLanguageModel } from 'langchain/base_language'
import { additionalCallbacks } from '../../../src/handler'
import { getBaseClasses } from '../../../src/utils'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
class MRKLAgentLLM_Agents implements INode {
label: string
@@ -41,24 +43,35 @@
]
}
async init(nodeData: INodeData): Promise<any> {
async init(): Promise<any> {
return null
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const model = nodeData.inputs?.model as BaseLanguageModel
let tools = nodeData.inputs?.tools as Tool[]
tools = flatten(tools)
const executor = await initializeAgentExecutorWithOptions(tools, model, {
agentType: 'zero-shot-react-description',
const prompt = await pull<PromptTemplate>('hwchase17/react')
const agent = await createReactAgent({
llm: model,
tools,
prompt
})
const executor = new AgentExecutor({
agent,
tools,
verbose: process.env.DEBUG === 'true' ? true : false
})
return executor
}
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const executor = nodeData.instance as AgentExecutor
const callbacks = await additionalCallbacks(nodeData, options)
const result = await executor.call({ input }, [...callbacks])
const result = await executor.invoke({
input,
callbacks
})
return result?.output
}

View File

@@ -1,13 +1,14 @@
import { ChainValues, AgentStep, BaseMessage } from 'langchain/schema'
import { getBaseClasses } from '../../../src/utils'
import { flatten } from 'lodash'
import { RunnableSequence } from 'langchain/schema/runnable'
import { formatToOpenAIFunction } from 'langchain/tools'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { BaseMessage } from '@langchain/core/messages'
import { ChainValues } from '@langchain/core/utils/types'
import { AgentStep } from '@langchain/core/agents'
import { RunnableSequence } from '@langchain/core/runnables'
import { ChatOpenAI, formatToOpenAIFunction } from '@langchain/openai'
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
import { getBaseClasses } from '../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { ChatPromptTemplate, MessagesPlaceholder } from 'langchain/prompts'
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
class OpenAIFunctionAgent_Agents implements INode {

View File

Before

Width:  |  Height:  |  Size: 1.0 KiB

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -1,6 +1,6 @@
import { INode, INodeParams } from '../../../src/Interface'
class LLMonitor_Analytic implements INode {
class Lunary_Analytic implements INode {
label: string
name: string
version: number
@@ -13,10 +13,10 @@ class LLMonitor_Analytic implements INode {
credential: INodeParams
constructor() {
this.label = 'LLMonitor'
this.name = 'llmonitor'
this.label = 'Lunary'
this.name = 'lunary'
this.version = 1.0
this.type = 'LLMonitor'
this.type = 'Lunary'
this.icon = 'Lunary.svg'
this.category = 'Analytic'
this.baseClasses = [this.type]
@@ -25,9 +25,9 @@
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['llmonitorApi']
credentialNames: ['lunaryApi']
}
}
}
module.exports = { nodeClass: LLMonitor_Analytic }
module.exports = { nodeClass: Lunary_Analytic }

View File

@@ -1,6 +1,6 @@
import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
import { BaseCache } from 'langchain/schema'
import { BaseCache } from '@langchain/core/caches'
import hash from 'object-hash'
import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
class InMemoryCache implements INode {
label: string

View File

@@ -1,7 +1,7 @@
import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
import { Embeddings } from '@langchain/core/embeddings'
import { BaseStore } from '@langchain/core/stores'
import { CacheBackedEmbeddings } from 'langchain/embeddings/cache_backed'
import { Embeddings } from 'langchain/embeddings/base'
import { BaseStore } from 'langchain/schema/storage'
import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
class InMemoryEmbeddingCache implements INode {
label: string

View File

@@ -1,6 +1,6 @@
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
import { MomentoCache as LangchainMomentoCache } from 'langchain/cache/momento'
import { CacheClient, Configurations, CredentialProvider } from '@gomomento/sdk'
import { MomentoCache as LangchainMomentoCache } from '@langchain/community/caches/momento'
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
class MomentoCache implements INode {
label: string

View File

@@ -1,9 +1,10 @@
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
import { RedisCache as LangchainRedisCache } from 'langchain/cache/ioredis'
import { Redis, RedisOptions } from 'ioredis'
import { isEqual } from 'lodash'
import { Generation, ChatGeneration, StoredGeneration, mapStoredMessageToChatMessage } from 'langchain/schema'
import hash from 'object-hash'
import { RedisCache as LangchainRedisCache } from '@langchain/community/caches/ioredis'
import { StoredGeneration, mapStoredMessageToChatMessage } from '@langchain/core/messages'
import { Generation, ChatGeneration } from '@langchain/core/outputs'
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
let redisClientSingleton: Redis
let redisClientOption: RedisOptions

View File

@@ -1,9 +1,9 @@
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
import { Redis, RedisOptions } from 'ioredis'
import { isEqual } from 'lodash'
import { RedisByteStore } from '@langchain/community/storage/ioredis'
import { Embeddings } from '@langchain/core/embeddings'
import { CacheBackedEmbeddings } from 'langchain/embeddings/cache_backed'
import { RedisByteStore } from 'langchain/storage/ioredis'
import { Embeddings } from 'langchain/embeddings/base'
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
let redisClientSingleton: Redis
let redisClientOption: RedisOptions

View File

@@ -1,5 +1,5 @@
import { UpstashRedisCache as LangchainUpstashRedisCache } from '@langchain/community/caches/upstash_redis'
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
import { UpstashRedisCache as LangchainUpstashRedisCache } from 'langchain/cache/upstash_redis'
class UpstashRedisCache implements INode {
label: string

View File

@@ -1,8 +1,8 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { PromptTemplate } from '@langchain/core/prompts'
import { APIChain } from 'langchain/chains'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
export const API_URL_RAW_PROMPT_TEMPLATE = `You are given the below API Documentation:

View File

@@ -1,7 +1,7 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ChatOpenAI } from '@langchain/openai'
import { APIChain, createOpenAPIChain } from 'langchain/chains'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
class OpenApiChain_Chains implements INode {

View File

@@ -1,9 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { PromptTemplate } from '@langchain/core/prompts'
import { API_RESPONSE_RAW_PROMPT_TEMPLATE, API_URL_RAW_PROMPT_TEMPLATE, APIChain } from './postCore'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
class POSTApiChain_Chains implements INode {
label: string

View File

@@ -1,8 +1,8 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { CallbackManagerForChainRun } from 'langchain/callbacks'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { CallbackManagerForChainRun } from '@langchain/core/callbacks/manager'
import { BaseChain, ChainInputs, LLMChain, SerializedAPIChain } from 'langchain/chains'
import { BasePromptTemplate, PromptTemplate } from 'langchain/prompts'
import { ChainValues } from 'langchain/schema'
import { BasePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
import { ChainValues } from '@langchain/core/utils/types'
import fetch from 'node-fetch'
export const API_URL_RAW_PROMPT_TEMPLATE = `You are given the below API Documentation:

View File

@@ -1,12 +1,12 @@
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConversationChain } from 'langchain/chains'
import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts'
import { BaseChatModel } from 'langchain/chat_models/base'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { RunnableSequence } from 'langchain/schema/runnable'
import { StringOutputParser } from 'langchain/schema/output_parser'
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from '@langchain/core/prompts'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { RunnableSequence } from '@langchain/core/runnables'
import { StringOutputParser } from '@langchain/core/output_parsers'
import { ConsoleCallbackHandler as LCConsoleCallbackHandler } from '@langchain/core/tracers/console'
import { ConversationChain } from 'langchain/chains'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
import { checkInputs, Moderation, streamResponse } from '../../moderation/Moderation'
import { formatResponse } from '../../outputparsers/OutputParserHelpers'

View File

@@ -1,19 +1,18 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ConversationalRetrievalQAChain } from 'langchain/chains'
import { BaseRetriever } from 'langchain/schema/retriever'
import { BufferMemoryInput } from 'langchain/memory'
import { PromptTemplate } from 'langchain/prompts'
import { QA_TEMPLATE, REPHRASE_TEMPLATE, RESPONSE_TEMPLATE } from './prompts'
import { Runnable, RunnableSequence, RunnableMap, RunnableBranch, RunnableLambda } from 'langchain/schema/runnable'
import { BaseMessage, HumanMessage, AIMessage } from 'langchain/schema'
import { StringOutputParser } from 'langchain/schema/output_parser'
import type { Document } from 'langchain/document'
import { ChatPromptTemplate, MessagesPlaceholder } from 'langchain/prompts'
import { applyPatch } from 'fast-json-patch'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { BaseRetriever } from '@langchain/core/retrievers'
import { PromptTemplate, ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
import { Runnable, RunnableSequence, RunnableMap, RunnableBranch, RunnableLambda } from '@langchain/core/runnables'
import { BaseMessage, HumanMessage, AIMessage } from '@langchain/core/messages'
import { ConsoleCallbackHandler as LCConsoleCallbackHandler } from '@langchain/core/tracers/console'
import { StringOutputParser } from '@langchain/core/output_parsers'
import type { Document } from '@langchain/core/documents'
import { BufferMemoryInput } from 'langchain/memory'
import { ConversationalRetrievalQAChain } from 'langchain/chains'
import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
import { ConsoleCallbackHandler, additionalCallbacks } from '../../../src/handler'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, MemoryMethods } from '../../../src/Interface'
import { ConsoleCallbackHandler as LCConsoleCallbackHandler } from '@langchain/core/tracers/console'
import { QA_TEMPLATE, REPHRASE_TEMPLATE, RESPONSE_TEMPLATE } from './prompts'
type RetrievalChainInput = {
chat_history: string

View File

@@ -1,13 +1,12 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { formatResponse, injectOutputParser } from '../../outputparsers/OutputParserHelpers'
import { BaseLLMOutputParser } from 'langchain/schema/output_parser'
import { BaseLanguageModel, BaseLanguageModelCallOptions } from '@langchain/core/language_models/base'
import { BaseLLMOutputParser, BaseOutputParser } from '@langchain/core/output_parsers'
import { OutputFixingParser } from 'langchain/output_parsers'
import { LLMChain } from 'langchain/chains'
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
import { checkInputs, Moderation, streamResponse } from '../../moderation/Moderation'
import { formatResponse, injectOutputParser } from '../../outputparsers/OutputParserHelpers'
class LLMChain_Chains implements INode {
label: string

View File

@@ -1,7 +1,7 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { MultiPromptChain } from 'langchain/chains'
import { ICommonObject, INode, INodeData, INodeParams, PromptRetriever } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { MultiPromptChain } from 'langchain/chains'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
class MultiPromptChain_Chains implements INode {

View File

@@ -1,7 +1,7 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { MultiRetrievalQAChain } from 'langchain/chains'
import { ICommonObject, INode, INodeData, INodeParams, VectorStoreRetriever } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { MultiRetrievalQAChain } from 'langchain/chains'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
class MultiRetrievalQAChain_Chains implements INode {

View File

@@ -1,9 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { BaseRetriever } from '@langchain/core/retrievers'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { RetrievalQAChain } from 'langchain/chains'
import { BaseRetriever } from 'langchain/schema/retriever'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
class RetrievalQAChain_Chains implements INode {
label: string

View File

@@ -1,12 +1,12 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { SqlDatabaseChain, SqlDatabaseChainInput, DEFAULT_SQL_DATABASE_PROMPT } from 'langchain/chains/sql_db'
import { getBaseClasses, getInputVariables } from '../../../src/utils'
import { DataSource } from 'typeorm'
import { SqlDatabase } from 'langchain/sql_db'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate, PromptTemplateInput } from 'langchain/prompts'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { DataSourceOptions } from 'typeorm/data-source'
import { DataSource } from 'typeorm'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { PromptTemplate, PromptTemplateInput } from '@langchain/core/prompts'
import { SqlDatabaseChain, SqlDatabaseChainInput, DEFAULT_SQL_DATABASE_PROMPT } from 'langchain/chains/sql_db'
import { SqlDatabase } from 'langchain/sql_db'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { getBaseClasses, getInputVariables } from '../../../src/utils'
type DatabaseType = 'sqlite' | 'postgres' | 'mssql' | 'mysql'

View File

@@ -1,9 +1,9 @@
import fetch from 'node-fetch'
import { Document } from '@langchain/core/documents'
import { VectaraStore } from '@langchain/community/vectorstores/vectara'
import { VectorDBQAChain } from 'langchain/chains'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { VectorDBQAChain } from 'langchain/chains'
import { Document } from 'langchain/document'
import { VectaraStore } from 'langchain/vectorstores/vectara'
import fetch from 'node-fetch'
// functionality based on https://github.com/vectara/vectara-answer
const reorderCitations = (unorderedSummary: string) => {

View File

@@ -1,9 +1,9 @@
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { VectorStore } from '@langchain/core/vectorstores'
import { VectorDBQAChain } from 'langchain/chains'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { VectorDBQAChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { VectorStore } from 'langchain/vectorstores/base'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
class VectorDBQAChain_Chains implements INode {
label: string

View File

@@ -1,9 +1,9 @@
import { BedrockChat } from '@langchain/community/chat_models/bedrock'
import { BaseCache } from '@langchain/core/caches'
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
import { BaseBedrockInput } from 'langchain/dist/util/bedrock'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BedrockChat } from 'langchain/chat_models/bedrock'
import { BaseBedrockInput } from 'langchain/dist/util/bedrock'
import { BaseCache } from 'langchain/schema'
import { BaseChatModelParams } from 'langchain/chat_models/base'
/**
* I had to run the following to build the component

View File

@ -1,8 +1,8 @@
import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class AzureChatOpenAI_ChatModels implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { BaseCache } from '@langchain/core/caches'
import { NIBittensorChatModel, BittensorInput } from 'langchain/experimental/chat_models/bittensor'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { NIBittensorChatModel, BittensorInput } from 'langchain/experimental/chat_models/bittensor'
import { BaseCache } from 'langchain/schema'
class Bittensor_ChatModels implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { AnthropicInput, ChatAnthropic } from '@langchain/anthropic'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { AnthropicInput, ChatAnthropic } from 'langchain/chat_models/anthropic'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class ChatAnthropic_ChatModels implements INode {
label: string

View File

@ -1,9 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { convertMultiOptionsToStringArray, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BaseCache } from 'langchain/schema'
import { ChatGoogleGenerativeAI, GoogleGenerativeAIChatInput } from '@langchain/google-genai'
import { HarmBlockThreshold, HarmCategory } from '@google/generative-ai'
import type { SafetySetting } from '@google/generative-ai'
import { ChatGoogleGenerativeAI, GoogleGenerativeAIChatInput } from '@langchain/google-genai'
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { convertMultiOptionsToStringArray, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
class GoogleGenerativeAI_ChatModels implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { ChatGooglePaLM, GooglePaLMChatInput } from '@langchain/community/chat_models/googlepalm'
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatGooglePaLM, GooglePaLMChatInput } from 'langchain/chat_models/googlepalm'
import { BaseCache } from 'langchain/schema'
class ChatGooglePaLM_ChatModels implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { GoogleAuthOptions } from 'google-auth-library'
import { BaseCache } from '@langchain/core/caches'
import { ChatGoogleVertexAI, GoogleVertexAIChatInput } from '@langchain/community/chat_models/googlevertexai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatGoogleVertexAI, GoogleVertexAIChatInput } from 'langchain/chat_models/googlevertexai'
import { GoogleAuthOptions } from 'google-auth-library'
import { BaseCache } from 'langchain/schema'
class GoogleVertexAI_ChatModels implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { BaseCache } from '@langchain/core/caches'
import { HFInput, HuggingFaceInference } from './core'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { HFInput, HuggingFaceInference } from './core'
import { BaseCache } from 'langchain/schema'
class ChatHuggingFace_ChatModels implements INode {
label: string

View File

@ -1,5 +1,5 @@
import { LLM, BaseLLMParams } from '@langchain/core/language_models/llms'
import { getEnvironmentVariable } from '../../../src/utils'
import { LLM, BaseLLMParams } from 'langchain/llms/base'
export interface HFInput {
/** Model to use */

View File

@ -1,9 +1,8 @@
import { OpenAIChatInput, ChatOpenAI } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { OpenAIChat } from 'langchain/llms/openai'
import { OpenAIChatInput } from 'langchain/chat_models/openai'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class ChatLocalAI_ChatModels implements INode {
label: string
@ -25,7 +24,7 @@ class ChatLocalAI_ChatModels implements INode {
this.icon = 'localai.png'
this.category = 'Chat Models'
this.description = 'Use local LLMs like llama.cpp, gpt4all using LocalAI'
this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(OpenAIChat)]
this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(ChatOpenAI)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
@ -111,7 +110,7 @@ class ChatLocalAI_ChatModels implements INode {
if (cache) obj.cache = cache
if (localAIApiKey) obj.openAIApiKey = localAIApiKey
const model = new OpenAIChat(obj, { basePath })
const model = new ChatOpenAI(obj, { basePath })
return model
}

View File

@ -1,7 +1,7 @@
import { BaseCache } from '@langchain/core/caches'
import { ChatMistralAI, ChatMistralAIInput } from '@langchain/mistralai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BaseCache } from 'langchain/schema'
import { ChatMistralAI, ChatMistralAIInput } from '@langchain/mistralai'
class ChatMistral_ChatModels implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { ChatOllama, ChatOllamaInput } from '@langchain/community/chat_models/ollama'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ChatOllama, ChatOllamaInput } from 'langchain/chat_models/ollama'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class ChatOllama_ChatModels implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { ChatOpenAI, OpenAIChatInput } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class ChatOpenAI_ChatModels implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { ChatOpenAI, OpenAIChatInput } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class ChatOpenAICustom_ChatModels implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import axios, { AxiosRequestConfig } from 'axios'
import { Document } from '@langchain/core/documents'
import { TextSplitter } from 'langchain/text_splitter'
import { BaseDocumentLoader } from 'langchain/document_loaders/base'
import { Document } from 'langchain/document'
import axios, { AxiosRequestConfig } from 'axios'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
class API_DocumentLoaders implements INode {
label: string

View File

@ -1,9 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import axios from 'axios'
import { Document } from '@langchain/core/documents'
import { TextSplitter } from 'langchain/text_splitter'
import { BaseDocumentLoader } from 'langchain/document_loaders/base'
import { Document } from 'langchain/document'
import axios from 'axios'
import { getCredentialData, getCredentialParam } from '../../../src/utils'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
class Airtable_DocumentLoaders implements INode {
label: string

View File

@ -2,7 +2,7 @@ import { INode, INodeData, INodeParams, ICommonObject } from '../../../src/Inter
import { getCredentialData, getCredentialParam } from '../../../src/utils'
import { TextSplitter } from 'langchain/text_splitter'
import { ApifyDatasetLoader } from 'langchain/document_loaders/web/apify_dataset'
import { Document } from 'langchain/document'
import { Document } from '@langchain/core/documents'
class ApifyWebsiteContentCrawler_DocumentLoaders implements INode {
label: string

View File

@ -1,6 +1,6 @@
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { Document } from 'langchain/document'
import { Document } from '@langchain/core/documents'
import { handleEscapeCharacters } from '../../../src'
class PlainText_DocumentLoaders implements INode {

View File

@ -1,97 +0,0 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { SRTLoader } from 'langchain/document_loaders/fs/srt'
/**
 * Flowise document-loader node for .srt subtitle files.
 *
 * Accepts one or more uploaded subtitle files (as base64 data URIs),
 * runs each through LangChain's SRTLoader, optionally splits the output
 * with a TextSplitter, and optionally merges user-supplied JSON metadata
 * into every produced document.
 */
class Subtitles_DocumentLoaders implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Subtitles File'
        this.name = 'subtitlesFile'
        this.version = 1.0
        this.type = 'Document'
        this.icon = 'subtitlesFile.svg'
        this.category = 'Document Loaders'
        this.description = `Load data from subtitles files`
        this.baseClasses = [this.type]
        this.inputs = [
            {
                label: 'Subtitles File',
                name: 'subtitlesFile',
                type: 'file',
                fileType: '.srt'
            },
            {
                label: 'Text Splitter',
                name: 'textSplitter',
                type: 'TextSplitter',
                optional: true
            },
            {
                label: 'Metadata',
                name: 'metadata',
                type: 'json',
                optional: true,
                additionalParams: true
            }
        ]
    }

    /**
     * Loads and returns the documents for the configured subtitle file(s).
     * @param nodeData - node inputs: subtitlesFile (base64 data URI or JSON
     *                   array of them), optional textSplitter, optional metadata
     * @returns array of document objects, with user metadata merged in when provided
     */
    async init(nodeData: INodeData): Promise<any> {
        const splitter = nodeData.inputs?.textSplitter as TextSplitter
        const fileBase64 = nodeData.inputs?.subtitlesFile as string
        const metadata = nodeData.inputs?.metadata

        // A multi-file upload arrives as a JSON-encoded array of data URIs;
        // a single upload is the bare data-URI string.
        const fileList: string[] =
            fileBase64.startsWith('[') && fileBase64.endsWith(']') ? JSON.parse(fileBase64) : [fileBase64]

        const collected = []
        for (const entry of fileList) {
            // Discard the trailing comma-separated segment, then base64-decode
            // the one before it (presumably the stored format appends
            // `,filename:<name>` after the payload — confirm against uploader).
            const segments = entry.split(',')
            segments.pop()
            const payload = Buffer.from(segments.pop() || '', 'base64')
            const loader = new SRTLoader(new Blob([payload]))
            const docs = splitter ? await loader.loadAndSplit(splitter) : await loader.load()
            collected.push(...docs)
        }

        if (!metadata) return collected

        // Merge user metadata over each document's own metadata keys.
        const extraMeta = typeof metadata === 'object' ? metadata : JSON.parse(metadata)
        const enriched = []
        for (const doc of collected) {
            enriched.push({
                ...doc,
                metadata: {
                    ...doc.metadata,
                    ...extraMeta
                }
            })
        }
        return enriched
    }
}

module.exports = { nodeClass: Subtitles_DocumentLoaders }

View File

@ -1,6 +0,0 @@
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.01 20.45C9.01153 19.7553 6 19.6248 6 21.8374C6 24.288 10.5 22.6248 10.5 25.2074C10.5 27.1484 7.49621 27.766 6 26.5" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M24.5 27V20M22 20H27" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M18 5H9C7.89543 5 7 5.89543 7 7V15M18 5L25 12M18 5V12H25M25 12V15" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M14 27V20H17C18.1046 20 19 20.8954 19 22C19 23.1046 18.1046 24 17 24M17 24H14M17 24L19 27" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 763 B

View File

@ -1,7 +1,7 @@
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { TextLoader } from 'langchain/document_loaders/fs/text'
import { Document } from 'langchain/document'
import { Document } from '@langchain/core/documents'
import { handleEscapeCharacters } from '../../../src'
class Text_DocumentLoaders implements INode {

View File

@ -1,4 +1,4 @@
import { VectorStore } from 'langchain/vectorstores/base'
import { VectorStore } from '@langchain/core/vectorstores'
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { handleEscapeCharacters } from '../../../src/utils'

View File

@ -1,7 +1,7 @@
import { BedrockRuntimeClient, InvokeModelCommand } from '@aws-sdk/client-bedrock-runtime'
import { BedrockEmbeddings, BedrockEmbeddingsParams } from '@langchain/community/embeddings/bedrock'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BedrockEmbeddings, BedrockEmbeddingsParams } from 'langchain/embeddings/bedrock'
import { BedrockRuntimeClient, InvokeModelCommand } from '@aws-sdk/client-bedrock-runtime'
class AWSBedrockEmbedding_Embeddings implements INode {
label: string

View File

@ -1,7 +1,6 @@
import { AzureOpenAIInput } from 'langchain/chat_models/openai'
import { AzureOpenAIInput, OpenAIEmbeddings, OpenAIEmbeddingsParams } from '@langchain/openai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai'
class AzureOpenAIEmbedding_Embeddings implements INode {
label: string

View File

@ -1,6 +1,6 @@
import { CohereEmbeddings, CohereEmbeddingsParams } from '@langchain/cohere'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { CohereEmbeddings, CohereEmbeddingsParams } from 'langchain/embeddings/cohere'
class CohereEmbedding_Embeddings implements INode {
label: string
@ -64,7 +64,7 @@ class CohereEmbedding_Embeddings implements INode {
apiKey: cohereApiKey
}
if (modelName) obj.modelName = modelName
if (modelName) obj.model = modelName
const model = new CohereEmbeddings(obj)
return model

View File

@ -1,6 +1,6 @@
import { GooglePaLMEmbeddings, GooglePaLMEmbeddingsParams } from '@langchain/community/embeddings/googlepalm'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { GooglePaLMEmbeddings, GooglePaLMEmbeddingsParams } from 'langchain/embeddings/googlepalm'
class GooglePaLMEmbedding_Embeddings implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { GoogleVertexAIEmbeddings, GoogleVertexAIEmbeddingsParams } from 'langchain/embeddings/googlevertexai'
import { GoogleAuthOptions } from 'google-auth-library'
import { GoogleVertexAIEmbeddings, GoogleVertexAIEmbeddingsParams } from '@langchain/community/embeddings/googlevertexai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { GoogleAuthOptions } from 'google-auth-library'
class GoogleVertexAIEmbedding_Embeddings implements INode {
label: string

View File

@ -1,5 +1,5 @@
import { HfInference } from '@huggingface/inference'
import { Embeddings, EmbeddingsParams } from 'langchain/embeddings/base'
import { Embeddings, EmbeddingsParams } from '@langchain/core/embeddings'
import { getEnvironmentVariable } from '../../../src/utils'
export interface HuggingFaceInferenceEmbeddingsParams extends EmbeddingsParams {

View File

@ -1,6 +1,6 @@
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from '@langchain/openai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getCredentialData, getCredentialParam } from '../../../src/utils'
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai'
class LocalAIEmbedding_Embeddings implements INode {
label: string

View File

@ -1,6 +1,6 @@
import { MistralAIEmbeddings, MistralAIEmbeddingsParams } from '@langchain/mistralai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { MistralAIEmbeddings, MistralAIEmbeddingsParams } from '@langchain/mistralai'
class MistralEmbedding_Embeddings implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama'
import { OllamaInput } from 'langchain/llms/ollama'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { OllamaInput } from 'langchain/llms/ollama'
import { OllamaEmbeddings } from 'langchain/embeddings/ollama'
class OllamaEmbedding_Embeddings implements INode {
label: string

View File

@ -1,6 +1,6 @@
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from '@langchain/openai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai'
class OpenAIEmbedding_Embeddings implements INode {
label: string

View File

@ -1,6 +1,6 @@
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from '@langchain/openai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai'
class OpenAIEmbeddingCustom_Embeddings implements INode {
label: string

View File

@ -1,9 +1,9 @@
import { Bedrock } from '@langchain/community/llms/bedrock'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { BaseBedrockInput } from 'langchain/dist/util/bedrock'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { Bedrock } from 'langchain/llms/bedrock'
import { BaseBedrockInput } from 'langchain/dist/util/bedrock'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
/**
* I had to run the following to build the component

View File

@ -1,8 +1,9 @@
import { AzureOpenAIInput, OpenAI, OpenAIInput } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { AzureOpenAIInput, OpenAI, OpenAIInput } from 'langchain/llms/openai'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class AzureOpenAI_LLMs implements INode {
label: string
name: string

View File

@ -1,8 +1,8 @@
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { NIBittensorLLM, BittensorInput } from 'langchain/experimental/llms/bittensor'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { NIBittensorLLM, BittensorInput } from 'langchain/experimental/llms/bittensor'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class Bittensor_LLMs implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { Cohere, CohereInput } from './core'
import { BaseCache } from 'langchain/schema'
class Cohere_LLMs implements INode {
label: string

View File

@ -1,4 +1,4 @@
import { LLM, BaseLLMParams } from 'langchain/llms/base'
import { LLM, BaseLLMParams } from '@langchain/core/language_models/llms'
export interface CohereInput extends BaseLLMParams {
/** Sampling temperature to use */

View File

@ -1,7 +1,8 @@
import { GooglePaLM, GooglePaLMTextInput } from '@langchain/community/llms/googlepalm'
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { GooglePaLM, GooglePaLMTextInput } from 'langchain/llms/googlepalm'
import { BaseCache } from 'langchain/schema'
class GooglePaLM_LLMs implements INode {
label: string
name: string

View File

@ -1,8 +1,8 @@
import { GoogleAuthOptions } from 'google-auth-library'
import { BaseCache } from '@langchain/core/caches'
import { GoogleVertexAI, GoogleVertexAITextInput } from '@langchain/community/llms/googlevertexai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { GoogleVertexAI, GoogleVertexAITextInput } from 'langchain/llms/googlevertexai'
import { GoogleAuthOptions } from 'google-auth-library'
import { BaseCache } from 'langchain/schema'
class GoogleVertexAI_LLMs implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { HFInput, HuggingFaceInference } from './core'
import { BaseCache } from 'langchain/schema'
class HuggingFaceInference_LLMs implements INode {
label: string

View File

@ -1,5 +1,5 @@
import { LLM, BaseLLMParams } from '@langchain/core/language_models/llms'
import { getEnvironmentVariable } from '../../../src/utils'
import { LLM, BaseLLMParams } from 'langchain/llms/base'
export interface HFInput {
/** Model to use */

View File

@ -1,8 +1,8 @@
import { Ollama, OllamaInput } from '@langchain/community/llms/ollama'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { Ollama, OllamaInput } from 'langchain/llms/ollama'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class Ollama_LLMs implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { OpenAI, OpenAIInput } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { OpenAI, OpenAIInput } from 'langchain/llms/openai'
import { BaseLLMParams } from 'langchain/llms/base'
import { BaseCache } from 'langchain/schema'
class OpenAI_LLMs implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { Replicate, ReplicateInput } from '@langchain/community/llms/replicate'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { Replicate, ReplicateInput } from 'langchain/llms/replicate'
import { BaseCache } from 'langchain/schema'
import { BaseLLMParams } from 'langchain/llms/base'
class Replicate_LLMs implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { FlowiseMemory, IMessage, INode, INodeData, INodeParams, MemoryMethods } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { BaseMessage } from 'langchain/schema'
import { BaseMessage } from '@langchain/core/messages'
class BufferMemory_Memory implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { FlowiseWindowMemory, IMessage, INode, INodeData, INodeParams, MemoryMethods } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
import { BufferWindowMemory, BufferWindowMemoryInput } from 'langchain/memory'
import { BaseMessage } from 'langchain/schema'
import { BaseMessage } from '@langchain/core/messages'
class BufferWindowMemory_Memory implements INode {
label: string

View File

@ -2,7 +2,7 @@ import { FlowiseSummaryMemory, IMessage, INode, INodeData, INodeParams, MemoryMe
import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
import { ConversationSummaryMemory, ConversationSummaryMemoryInput } from 'langchain/memory'
import { BaseLanguageModel } from 'langchain/base_language'
import { BaseMessage } from 'langchain/schema'
import { BaseMessage } from '@langchain/core/messages'
class ConversationSummaryMemory_Memory implements INode {
label: string

View File

@ -9,9 +9,9 @@ import {
DeleteItemCommandInput,
AttributeValue
} from '@aws-sdk/client-dynamodb'
import { DynamoDBChatMessageHistory } from 'langchain/stores/message/dynamodb'
import { DynamoDBChatMessageHistory } from '@langchain/community/stores/message/dynamodb'
import { mapStoredMessageToChatMessage, AIMessage, HumanMessage, StoredMessage, BaseMessage } from '@langchain/core/messages'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { mapStoredMessageToChatMessage, AIMessage, HumanMessage, StoredMessage, BaseMessage } from 'langchain/schema'
import { convertBaseMessagetoIMessage, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, MemoryMethods, MessageType } from '../../../src/Interface'

View File

@ -1,7 +1,7 @@
import { MongoClient, Collection, Document } from 'mongodb'
import { MongoDBChatMessageHistory } from 'langchain/stores/message/mongodb'
import { MongoDBChatMessageHistory } from '@langchain/community/stores/message/mongodb'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { mapStoredMessageToChatMessage, AIMessage, HumanMessage, BaseMessage } from 'langchain/schema'
import { mapStoredMessageToChatMessage, AIMessage, HumanMessage, BaseMessage } from '@langchain/core/messages'
import { convertBaseMessagetoIMessage, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, MemoryMethods, MessageType } from '../../../src/Interface'

View File

@ -3,7 +3,7 @@ import { convertBaseMessagetoIMessage, getBaseClasses, getCredentialData, getCre
import { ICommonObject } from '../../../src'
import { MotorheadMemory, MotorheadMemoryInput, InputValues, OutputValues } from 'langchain/memory'
import fetch from 'node-fetch'
import { AIMessage, BaseMessage, ChatMessage, HumanMessage } from 'langchain/schema'
import { AIMessage, BaseMessage, ChatMessage, HumanMessage } from '@langchain/core/messages'
type MotorheadMessage = {
content: string

View File

@ -1,8 +1,8 @@
import { Redis, RedisOptions } from 'ioredis'
import { isEqual } from 'lodash'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { RedisChatMessageHistory, RedisChatMessageHistoryInput } from 'langchain/stores/message/ioredis'
import { mapStoredMessageToChatMessage, BaseMessage, AIMessage, HumanMessage } from 'langchain/schema'
import { RedisChatMessageHistory, RedisChatMessageHistoryInput } from '@langchain/community/stores/message/ioredis'
import { mapStoredMessageToChatMessage, BaseMessage, AIMessage, HumanMessage } from '@langchain/core/messages'
import { INode, INodeData, INodeParams, ICommonObject, MessageType, IMessage, MemoryMethods, FlowiseMemory } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'

View File

@ -1,7 +1,7 @@
import { Redis } from '@upstash/redis'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { UpstashRedisChatMessageHistory } from 'langchain/stores/message/upstash_redis'
import { mapStoredMessageToChatMessage, AIMessage, HumanMessage, StoredMessage, BaseMessage } from 'langchain/schema'
import { UpstashRedisChatMessageHistory } from '@langchain/community/stores/message/upstash_redis'
import { mapStoredMessageToChatMessage, AIMessage, HumanMessage, StoredMessage, BaseMessage } from '@langchain/core/messages'
import { FlowiseMemory, IMessage, INode, INodeData, INodeParams, MemoryMethods, MessageType } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ICommonObject } from '../../../src/Interface'

View File

@ -1,9 +1,8 @@
import { IMessage, INode, INodeData, INodeParams, MemoryMethods, MessageType } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ZepMemory, ZepMemoryInput } from 'langchain/memory/zep'
import { ICommonObject } from '../../../src'
import { ZepMemory, ZepMemoryInput } from '@langchain/community/memory/zep'
import { BaseMessage } from '@langchain/core/messages'
import { InputValues, MemoryVariables, OutputValues } from 'langchain/memory'
import { BaseMessage } from 'langchain/schema'
import { IMessage, INode, INodeData, INodeParams, MemoryMethods, MessageType, ICommonObject } from '../../../src/Interface'
import { convertBaseMessagetoIMessage, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
class ZepMemory_Memory implements INode {
label: string

View File

@ -1,7 +1,7 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src'
import { Moderation } from '../Moderation'
import { OpenAIModerationRunner } from './OpenAIModerationRunner'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src'
class OpenAIModeration implements INode {
label: string

View File

@ -2,7 +2,7 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src'
import { Moderation } from '../Moderation'
import { SimplePromptModerationRunner } from './SimplePromptModerationRunner'
import { BaseChatModel } from 'langchain/chat_models/base'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
class SimplePromptModeration implements INode {
label: string

View File

@ -1,5 +1,5 @@
import { Moderation } from '../Moderation'
import { BaseChatModel } from 'langchain/chat_models/base'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
export class SimplePromptModerationRunner implements Moderation {
private readonly denyList: string = ''

View File

@ -1,6 +1,5 @@
import { BaseOutputParser, CommaSeparatedListOutputParser } from '@langchain/core/output_parsers'
import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { CommaSeparatedListOutputParser } from 'langchain/output_parsers'
import { CATEGORY } from '../OutputParserHelpers'
class CSVListOutputParser implements INode {

View File

@ -1,7 +1,6 @@
import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { CustomListOutputParser as LangchainCustomListOutputParser } from 'langchain/output_parsers'
import { BaseOutputParser, CustomListOutputParser as LangchainCustomListOutputParser } from '@langchain/core/output_parsers'
import { CATEGORY } from '../OutputParserHelpers'
import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src'
class CustomListOutputParser implements INode {
label: string

View File

@ -1,8 +1,8 @@
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { BaseOutputParser } from '@langchain/core/output_parsers'
import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts'
import { BaseLanguageModel, BaseLanguageModelCallOptions } from '@langchain/core/language_models/base'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language'
import { ICommonObject } from '../../src'
import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts'
export const CATEGORY = 'Output Parsers'

View File

@ -1,8 +1,8 @@
import { convertSchemaToZod, getBaseClasses, INode, INodeData, INodeParams } from '../../../src'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { z } from 'zod'
import { BaseOutputParser } from '@langchain/core/output_parsers'
import { StructuredOutputParser as LangchainStructuredOutputParser } from 'langchain/output_parsers'
import { CATEGORY } from '../OutputParserHelpers'
import { z } from 'zod'
import { convertSchemaToZod, getBaseClasses, INode, INodeData, INodeParams } from '../../../src'
class StructuredOutputParser implements INode {
label: string

View File

@ -1,6 +1,6 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } from 'langchain/prompts'
import { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } from '@langchain/core/prompts'
class ChatPromptTemplate_Prompts implements INode {
label: string

View File

@ -1,8 +1,7 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getInputVariables } from '../../../src/utils'
import { FewShotPromptTemplate, FewShotPromptTemplateInput, PromptTemplate } from 'langchain/prompts'
import { Example } from 'langchain/schema'
import { TemplateFormat } from 'langchain/dist/prompts/template'
import { FewShotPromptTemplate, FewShotPromptTemplateInput, PromptTemplate, TemplateFormat } from '@langchain/core/prompts'
import type { Example } from '@langchain/core/prompts'
class FewShotPromptTemplate_Prompts implements INode {
label: string

View File

@ -1,6 +1,6 @@
import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
import { getBaseClasses, getInputVariables } from '../../../src/utils'
import { PromptTemplateInput } from 'langchain/prompts'
import { PromptTemplateInput } from '@langchain/core/prompts'
class PromptTemplate_Prompts implements INode {
label: string

View File

@ -1,7 +1,8 @@
import { Callbacks } from 'langchain/callbacks'
import { Document } from 'langchain/document'
import { BaseDocumentCompressor } from 'langchain/retrievers/document_compressors'
import axios from 'axios'
import { Callbacks } from '@langchain/core/callbacks/manager'
import { Document } from '@langchain/core/documents'
import { BaseDocumentCompressor } from 'langchain/retrievers/document_compressors'
export class CohereRerank extends BaseDocumentCompressor {
private cohereAPIKey: any
private COHERE_API_URL = 'https://api.cohere.ai/v1/rerank'

View File

@ -1,9 +1,9 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { BaseRetriever } from 'langchain/schema/retriever'
import { BaseRetriever } from '@langchain/core/retrievers'
import { VectorStoreRetriever } from '@langchain/core/vectorstores'
import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression'
import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src'
import { CohereRerank } from './CohereRerank'
import { VectorStoreRetriever } from 'langchain/vectorstores/base'
import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src'
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
class CohereRerankRetriever_Retrievers implements INode {
label: string

View File

@ -1,9 +1,9 @@
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { BaseRetriever } from 'langchain/schema/retriever'
import { Embeddings } from 'langchain/embeddings/base'
import { BaseRetriever } from '@langchain/core/retrievers'
import { Embeddings } from '@langchain/core/embeddings'
import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression'
import { EmbeddingsFilter } from 'langchain/retrievers/document_compressors/embeddings_filter'
import { handleEscapeCharacters } from '../../../src/utils'
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
class EmbeddingsFilterRetriever_Retrievers implements INode {
label: string

View File

@ -1,9 +1,9 @@
import { VectorStore } from 'langchain/vectorstores/base'
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { VectorStore } from '@langchain/core/vectorstores'
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { PromptTemplate } from '@langchain/core/prompts'
import { HydeRetriever, HydeRetrieverOptions, PromptKey } from 'langchain/retrievers/hyde'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
import { handleEscapeCharacters } from '../../../src/utils'
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
class HydeRetriever_Retrievers implements INode {
label: string

Some files were not shown because too many files have changed in this diff Show More