Merge branch 'main' into add_mmr_vectara
commit dd07c3d778

LICENSE.md (16 changed lines)
@@ -2,22 +2,6 @@
 Version 2.0, January 2004
 http://www.apache.org/licenses/
 
-Flowise is governed by the Apache License 2.0, with additional terms and conditions outlined below:
-
-Flowise can be used for commercial purposes for "backend-as-a-service" for your applications or as a development platform for enterprises. However, under specific conditions, you must reach out to the project's administrators to secure a commercial license:
-
-a. Multi-tenant SaaS service: Unless you have explicit written authorization from Flowise, you may not utilize the Flowise source code to operate a multi-tenant SaaS service that closely resembles the Flowise cloud-based services.
-b. Logo and copyright information: While using Flowise in commercial application, you are prohibited from removing or altering the LOGO or copyright information displayed in the Flowise console and UI.
-
-For inquiries regarding licensing matters, please contact hello@flowiseai.com via email.
-
-Contributors are required to consent to the following terms related to their contributed code:
-
-a. The project maintainers have the authority to modify the open-source agreement to be more stringent or lenient.
-b. Contributed code can be used for commercial purposes, including Flowise's cloud-based services.
-
-All other rights and restrictions are in accordance with the Apache License 2.0.
-
 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 
 1. Definitions.
@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.8",
+    "version": "1.4.9",
     "private": true,
     "homepage": "https://flowiseai.com",
     "workspaces": [
@@ -1,7 +1,6 @@
-import { OpenAIBaseInput } from 'langchain/dist/types/openai-types'
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { AzureOpenAIInput, ChatOpenAI } from 'langchain/chat_models/openai'
+import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
 import { BaseCache } from 'langchain/schema'
 import { BaseLLMParams } from 'langchain/llms/base'
 

@@ -123,7 +122,7 @@ class AzureChatOpenAI_ChatModels implements INode {
         const azureOpenAIApiDeploymentName = getCredentialParam('azureOpenAIApiDeploymentName', credentialData, nodeData)
         const azureOpenAIApiVersion = getCredentialParam('azureOpenAIApiVersion', credentialData, nodeData)
 
-        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIBaseInput> = {
+        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIChatInput> = {
             temperature: parseFloat(temperature),
             modelName,
             azureOpenAIApiKey,
@@ -124,13 +124,13 @@ class ChatMistral_ChatModels implements INode {
         const safeMode = nodeData.inputs?.safeMode as boolean
         const randomSeed = nodeData.inputs?.safeMode as string
         const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string
-        // Waiting fix from langchain + mistral to enable streaming - https://github.com/mistralai/client-js/issues/18
+        const streaming = nodeData.inputs?.streaming as boolean
 
         const cache = nodeData.inputs?.cache as BaseCache
 
         const obj: ChatMistralAIInput = {
             apiKey: apiKey,
-            modelName: modelName
+            modelName: modelName,
+            streaming: streaming ?? true
         }
 
         if (maxOutputTokens) obj.maxTokens = parseInt(maxOutputTokens, 10)
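The ChatMistral hunk above replaces the "waiting for a fix" comment with a real `streaming` input that defaults to on. A minimal sketch of that defaulting behaviour, assuming the same nullish-coalescing pattern as the diff (`resolveStreaming` and its argument shape are illustrative, not part of the commit):

    // Sketch only: mirrors `streaming: streaming ?? true` from the hunk above.
    const resolveStreaming = (inputs: { streaming?: boolean }): boolean => inputs.streaming ?? true

    console.log(resolveStreaming({}))                   // true: an unset toggle falls back to streaming
    console.log(resolveStreaming({ streaming: false })) // false: an explicit opt-out is respected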
@@ -1,8 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
-import { ChatOllama } from 'langchain/chat_models/ollama'
+import { ChatOllama, ChatOllamaInput } from 'langchain/chat_models/ollama'
 import { BaseCache } from 'langchain/schema'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 import { BaseLLMParams } from 'langchain/llms/base'
 
 class ChatOllama_ChatModels implements INode {

@@ -209,7 +208,7 @@ class ChatOllama_ChatModels implements INode {
 
         const cache = nodeData.inputs?.cache as BaseCache
 
-        const obj: OllamaInput & BaseLLMParams = {
+        const obj: ChatOllamaInput & BaseLLMParams = {
             baseUrl,
             temperature: parseFloat(temperature),
             model: modelName
@@ -20,7 +20,7 @@ class Airtable_DocumentLoaders implements INode {
     constructor() {
         this.label = 'Airtable'
         this.name = 'airtable'
-        this.version = 1.0
+        this.version = 2.0
         this.type = 'Document'
         this.icon = 'airtable.svg'
         this.category = 'Document Loaders'

@@ -55,6 +55,15 @@ class Airtable_DocumentLoaders implements INode {
                 description:
                     'If your table URL looks like: https://airtable.com/app11RobdGoX0YNsC/tblJdmvbrgizbYICO/viw9UrP77Id0CE4ee, tblJdmvbrgizbYICO is the table id'
             },
+            {
+                label: 'View Id',
+                name: 'viewId',
+                type: 'string',
+                placeholder: 'viw9UrP77Id0CE4ee',
+                description:
+                    'If your view URL looks like: https://airtable.com/app11RobdGoX0YNsC/tblJdmvbrgizbYICO/viw9UrP77Id0CE4ee, viw9UrP77Id0CE4ee is the view id',
+                optional: true
+            },
             {
                 label: 'Return All',
                 name: 'returnAll',

@@ -83,6 +92,7 @@ class Airtable_DocumentLoaders implements INode {
     async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
         const baseId = nodeData.inputs?.baseId as string
         const tableId = nodeData.inputs?.tableId as string
+        const viewId = nodeData.inputs?.viewId as string
         const returnAll = nodeData.inputs?.returnAll as boolean
         const limit = nodeData.inputs?.limit as string
         const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
@@ -94,6 +104,7 @@ class Airtable_DocumentLoaders implements INode {
         const airtableOptions: AirtableLoaderParams = {
             baseId,
             tableId,
+            viewId,
             returnAll,
             accessToken,
             limit: limit ? parseInt(limit, 10) : 100

@@ -133,6 +144,7 @@ interface AirtableLoaderParams {
     baseId: string
     tableId: string
     accessToken: string
+    viewId?: string
     limit?: number
     returnAll?: boolean
 }

@@ -153,16 +165,19 @@ class AirtableLoader extends BaseDocumentLoader {
 
     public readonly tableId: string
 
+    public readonly viewId?: string
+
     public readonly accessToken: string
 
     public readonly limit: number
 
     public readonly returnAll: boolean
 
-    constructor({ baseId, tableId, accessToken, limit = 100, returnAll = false }: AirtableLoaderParams) {
+    constructor({ baseId, tableId, viewId, accessToken, limit = 100, returnAll = false }: AirtableLoaderParams) {
         super()
         this.baseId = baseId
         this.tableId = tableId
+        this.viewId = viewId
         this.accessToken = accessToken
         this.limit = limit
         this.returnAll = returnAll

@@ -203,7 +218,7 @@ class AirtableLoader extends BaseDocumentLoader {
     }
 
     private async loadLimit(): Promise<Document[]> {
-        const params = { maxRecords: this.limit }
+        const params = { maxRecords: this.limit, view: this.viewId }
         const data = await this.fetchAirtableData(`https://api.airtable.com/v0/${this.baseId}/${this.tableId}`, params)
         if (data.records.length === 0) {
             return []

@@ -212,7 +227,7 @@ class AirtableLoader extends BaseDocumentLoader {
     }
 
     private async loadAll(): Promise<Document[]> {
-        const params: ICommonObject = { pageSize: 100 }
+        const params: ICommonObject = { pageSize: 100, view: this.viewId }
         let data: AirtableLoaderResponse
         let returnPages: AirtableLoaderPage[] = []
 
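Taken together, the Airtable hunks thread an optional `viewId` from the node input through `AirtableLoaderParams` and into the `view` query parameter of the list-records request. A rough sketch of how that parameter lands in the request URL, assuming a plain URL builder (the loader's actual `fetchAirtableData` helper is not part of this diff; the sample ids come from the placeholder text above):

    // Sketch only: builds the list-records URL from the params objects used by
    // loadLimit() and loadAll(); `view` is simply omitted when no viewId was supplied.
    const buildAirtableUrl = (
        baseId: string,
        tableId: string,
        params: { maxRecords?: number; pageSize?: number; view?: string }
    ): string => {
        const url = new URL(`https://api.airtable.com/v0/${baseId}/${tableId}`)
        for (const [key, value] of Object.entries(params)) {
            if (value !== undefined) url.searchParams.set(key, String(value))
        }
        return url.toString()
    }

    // -> https://api.airtable.com/v0/app11RobdGoX0YNsC/tblJdmvbrgizbYICO?maxRecords=100&view=viw9UrP77Id0CE4ee
    console.log(buildAirtableUrl('app11RobdGoX0YNsC', 'tblJdmvbrgizbYICO', { maxRecords: 100, view: 'viw9UrP77Id0CE4ee' }))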
@@ -1,7 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
+import { OllamaInput } from 'langchain/llms/ollama'
 import { OllamaEmbeddings } from 'langchain/embeddings/ollama'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 
 class OllamaEmbedding_Embeddings implements INode {
     label: string
@@ -1,8 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
-import { Ollama } from 'langchain/llms/ollama'
+import { Ollama, OllamaInput } from 'langchain/llms/ollama'
 import { BaseCache } from 'langchain/schema'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 import { BaseLLMParams } from 'langchain/llms/base'
 
 class Ollama_LLMs implements INode {
@@ -24,7 +24,7 @@ class Postgres_VectorStores implements INode {
     constructor() {
         this.label = 'Postgres'
         this.name = 'postgres'
-        this.version = 1.0
+        this.version = 2.0
         this.type = 'Postgres'
         this.icon = 'postgres.svg'
         this.category = 'Vector Stores'

@@ -60,6 +60,13 @@ class Postgres_VectorStores implements INode {
                 name: 'database',
                 type: 'string'
             },
+            {
+                label: 'SSL Connection',
+                name: 'sslConnection',
+                type: 'boolean',
+                default: false,
+                optional: false
+            },
             {
                 label: 'Port',
                 name: 'port',

@@ -117,6 +124,7 @@ class Postgres_VectorStores implements INode {
         const docs = nodeData.inputs?.document as Document[]
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const additionalConfig = nodeData.inputs?.additionalConfig as string
+        const sslConnection = nodeData.inputs?.sslConnection as boolean
 
         let additionalConfiguration = {}
         if (additionalConfig) {

@@ -134,7 +142,8 @@ class Postgres_VectorStores implements INode {
             port: nodeData.inputs?.port as number,
             username: user,
             password: password,
-            database: nodeData.inputs?.database as string
+            database: nodeData.inputs?.database as string,
+            ssl: sslConnection
         }
 
         const args = {
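The same three-part change (a boolean `SSL Connection` input, an `sslConnection` read in `init`, and an `ssl` field on the connection options) is repeated in the two Postgres nodes that follow. A minimal sketch of the resulting options object, using only the fields visible in the hunk (the values are illustrative):

    // Sketch only: the connection options shape implied by the hunk above,
    // with the new ssl flag appended after database.
    const postgresConnectionOptions = {
        port: 5432,
        username: 'postgres',
        password: 'example-password',
        database: 'flowise',
        ssl: false // now driven by the sslConnection input, which defaults to false
    }
    console.log(postgresConnectionOptions)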
@@ -23,7 +23,7 @@ class Postgres_Existing_VectorStores implements INode {
     constructor() {
         this.label = 'Postgres Load Existing Index'
         this.name = 'postgresExistingIndex'
-        this.version = 1.0
+        this.version = 2.0
         this.type = 'Postgres'
         this.icon = 'postgres.svg'
         this.category = 'Vector Stores'

@@ -52,6 +52,13 @@ class Postgres_Existing_VectorStores implements INode {
                 name: 'database',
                 type: 'string'
             },
+            {
+                label: 'SSL Connection',
+                name: 'sslConnection',
+                type: 'boolean',
+                default: false,
+                optional: false
+            },
             {
                 label: 'Port',
                 name: 'port',

@@ -109,6 +116,7 @@ class Postgres_Existing_VectorStores implements INode {
         const output = nodeData.outputs?.output as string
         const topK = nodeData.inputs?.topK as string
         const k = topK ? parseFloat(topK) : 4
+        const sslConnection = nodeData.inputs?.sslConnection as boolean
 
         let additionalConfiguration = {}
         if (additionalConfig) {

@@ -126,7 +134,8 @@ class Postgres_Existing_VectorStores implements INode {
             port: nodeData.inputs?.port as number,
             username: user,
             password: password,
-            database: nodeData.inputs?.database as string
+            database: nodeData.inputs?.database as string,
+            ssl: sslConnection
         }
 
         const args = {
@@ -24,7 +24,7 @@ class PostgresUpsert_VectorStores implements INode {
     constructor() {
         this.label = 'Postgres Upsert Document'
         this.name = 'postgresUpsert'
-        this.version = 1.0
+        this.version = 2.0
         this.type = 'Postgres'
         this.icon = 'postgres.svg'
         this.category = 'Vector Stores'

@@ -59,6 +59,13 @@ class PostgresUpsert_VectorStores implements INode {
                 name: 'database',
                 type: 'string'
             },
+            {
+                label: 'SSL Connection',
+                name: 'sslConnection',
+                type: 'boolean',
+                default: false,
+                optional: false
+            },
             {
                 label: 'Port',
                 name: 'port',

@@ -117,6 +124,7 @@ class PostgresUpsert_VectorStores implements INode {
         const output = nodeData.outputs?.output as string
         const topK = nodeData.inputs?.topK as string
         const k = topK ? parseFloat(topK) : 4
+        const sslConnection = nodeData.inputs?.sslConnection as boolean
 
         let additionalConfiguration = {}
         if (additionalConfig) {

@@ -134,7 +142,8 @@ class PostgresUpsert_VectorStores implements INode {
             port: nodeData.inputs?.port as number,
             username: user,
             password: password,
-            database: nodeData.inputs?.database as string
+            database: nodeData.inputs?.database as string,
+            ssl: sslConnection
         }
 
         const args = {
@@ -149,9 +149,12 @@ class Qdrant_VectorStores implements INode {
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData)
 
+        const port = Qdrant_VectorStores.determinePortByUrl(qdrantServerUrl)
+
         const client = new QdrantClient({
             url: qdrantServerUrl,
-            apiKey: qdrantApiKey
+            apiKey: qdrantApiKey,
+            port: port
         })
 
         const flattenDocs = docs && docs.length ? flatten(docs) : []

@@ -198,9 +201,12 @@ class Qdrant_VectorStores implements INode {
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData)
 
+        const port = Qdrant_VectorStores.determinePortByUrl(qdrantServerUrl)
+
         const client = new QdrantClient({
             url: qdrantServerUrl,
-            apiKey: qdrantApiKey
+            apiKey: qdrantApiKey,
+            port: port
         })
 
         const dbConfig: QdrantLibArgs = {

@@ -242,6 +248,28 @@ class Qdrant_VectorStores implements INode {
         }
         return vectorStore
     }
+
+    /**
+     * Determine the port number from the given URL.
+     *
+     * The problem is when not doing this the qdrant-client.js will fall back on 6663 when you enter a port 443 and 80.
+     * See: https://stackoverflow.com/questions/59104197/nodejs-new-url-urlhttps-myurl-com80-lists-the-port-as-empty
+     * @param qdrantServerUrl the url to get the port from
+     */
+    static determinePortByUrl(qdrantServerUrl: string): number {
+        const parsedUrl = new URL(qdrantServerUrl)
+
+        let port = parsedUrl.port ? parseInt(parsedUrl.port) : 6663
+
+        if (parsedUrl.protocol === 'https:' && parsedUrl.port === '') {
+            port = 443
+        }
+        if (parsedUrl.protocol === 'http:' && parsedUrl.port === '') {
+            port = 80
+        }
+
+        return port
+    }
 }
 
 module.exports = { nodeClass: Qdrant_VectorStores }
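The new `determinePortByUrl` helper works around the `URL` API reporting an empty `port` for the default 443/80 ports (the Stack Overflow question linked in the doc comment), so the Qdrant client no longer falls back to 6663 for plain https/http URLs. A quick sketch of the expected behaviour, restating the helper's logic as a standalone function so it can be run in isolation (host names are illustrative):

    // Sketch only: reproduces the logic of the static helper added above.
    const determinePortByUrl = (qdrantServerUrl: string): number => {
        const parsedUrl = new URL(qdrantServerUrl)
        let port = parsedUrl.port ? parseInt(parsedUrl.port) : 6663
        if (parsedUrl.protocol === 'https:' && parsedUrl.port === '') port = 443
        if (parsedUrl.protocol === 'http:' && parsedUrl.port === '') port = 80
        return port
    }

    console.log(new URL('https://qdrant.example.com').port)            // '' — the URL API drops default ports
    console.log(determinePortByUrl('https://qdrant.example.com'))      // 443
    console.log(determinePortByUrl('http://qdrant.example.com'))       // 80
    console.log(determinePortByUrl('https://qdrant.example.com:6333')) // 6333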
@@ -26,8 +26,8 @@
         "@gomomento/sdk-core": "^1.51.1",
         "@google-ai/generativelanguage": "^0.2.1",
         "@huggingface/inference": "^2.6.1",
-        "@langchain/google-genai": "^0.0.3",
-        "@langchain/mistralai": "^0.0.3",
+        "@langchain/google-genai": "^0.0.6",
+        "@langchain/mistralai": "^0.0.6",
         "@notionhq/client": "^2.2.8",
         "@opensearch-project/opensearch": "^1.2.0",
         "@pinecone-database/pinecone": "^1.1.1",

@@ -52,10 +52,10 @@
         "html-to-text": "^9.0.5",
         "husky": "^8.0.3",
         "ioredis": "^5.3.2",
-        "langchain": "^0.0.213",
-        "langfuse": "^1.2.0",
-        "langfuse-langchain": "^1.0.31",
-        "langsmith": "^0.0.49",
+        "langchain": "^0.0.214",
+        "langfuse": "2.0.2",
+        "langfuse-langchain": "2.0.2",
+        "langsmith": "0.0.53",
         "linkifyjs": "^4.1.1",
         "llmonitor": "^0.5.5",
         "mammoth": "^1.5.1",
@@ -538,7 +538,7 @@ export class AnalyticHandler {
             if (trace) {
                 const generation = trace.generation({
                     name,
-                    prompt: input
+                    input: input
                 })
                 this.handlers['langFuse'].generation = { [generation.id]: generation }
                 returnIds['langFuse'].generation = generation.id

@@ -583,7 +583,7 @@ export class AnalyticHandler {
             const generation: LangfuseGenerationClient | undefined = this.handlers['langFuse'].generation[returnIds['langFuse'].generation]
             if (generation) {
                 generation.end({
-                    completion: output
+                    output: output
                 })
             }
         }

@@ -618,7 +618,7 @@ export class AnalyticHandler {
             const generation: LangfuseGenerationClient | undefined = this.handlers['langFuse'].generation[returnIds['langFuse'].generation]
             if (generation) {
                 generation.end({
-                    completion: error
+                    output: error
                 })
             }
         }
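These handler hunks track the langfuse / langfuse-langchain bump to 2.0.2 in the dependency hunk above: the generation payload keys move from `prompt`/`completion` to `input`/`output`. A minimal sketch of the before/after shapes; only the key names come from the diff, the wrapper interfaces and values are illustrative:

    // Sketch only: payload shapes used by AnalyticHandler before and after the rename.
    interface GenerationStartV1 { name: string; prompt: unknown }   // 1.x style, removed
    interface GenerationStartV2 { name: string; input: unknown }    // 2.x style, added
    interface GenerationEndV1 { completion: unknown }               // removed
    interface GenerationEndV2 { output: unknown }                   // added

    const start: GenerationStartV2 = { name: 'llm-call', input: 'Hello' }
    const end: GenerationEndV2 = { output: 'Hi there!' }
    console.log(start, end)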
@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.8",
+    "version": "1.4.9",
     "description": "Flowiseai Server",
     "main": "dist/index",
     "types": "dist/index.d.ts",
@@ -818,7 +818,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component
 */
 export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNodeData: INodeData) => {
     const streamAvailableLLMs = {
-        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock'],
+        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock', 'chatMistralAI'],
         LLMs: ['azureOpenAI', 'openAI', 'ollama']
     }
 
@@ -875,7 +875,9 @@ export const getEncryptionKey = async (): Promise<string> => {
         return await fs.promises.readFile(getEncryptionKeyPath(), 'utf8')
     } catch (error) {
         const encryptKey = generateEncryptKey()
-        const defaultLocation = path.join(getUserHome(), '.flowise', 'encryption.key')
+        const defaultLocation = process.env.SECRETKEY_PATH
+            ? path.join(process.env.SECRETKEY_PATH, 'encryption.key')
+            : path.join(getUserHome(), '.flowise', 'encryption.key')
         await fs.promises.writeFile(defaultLocation, encryptKey)
         return encryptKey
     }
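The final hunk makes the freshly generated encryption key respect `SECRETKEY_PATH` instead of always writing to `~/.flowise`. A small sketch of the resolution logic, assuming the same environment-variable convention as the diff (`getUserHome()` from the real code is approximated here with `os.homedir()`):

    import os from 'os'
    import path from 'path'

    // Sketch only: mirrors the defaultLocation ternary from the hunk above.
    const resolveEncryptionKeyPath = (): string =>
        process.env.SECRETKEY_PATH
            ? path.join(process.env.SECRETKEY_PATH, 'encryption.key')
            : path.join(os.homedir(), '.flowise', 'encryption.key')

    // e.g. SECRETKEY_PATH=/run/secrets/flowise -> /run/secrets/flowise/encryption.key
    console.log(resolveEncryptionKeyPath())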