Add batchSize parameter to Qdrant_VectorStores and QdrantUpsert_VectorStores (#2215)
* Add batchSize parameter to Qdrant_VectorStores and QdrantUpsert_VectorStores * Make the batchSize parameter optional in Qdrant_VectorStores and QdrantUpsert_VectorStores; also try normal (single-call) upsert mode first and, if it fails, fall back to batch mode * Update default values for batchSize in Qdrant_VectorStores and QdrantUpsert_VectorStores * Update PNPM version to v9.0.2 in Dockerfile * Update Qdrant.ts * Update Dockerfile * Update Qdrant_Upsert.ts * Update Qdrant.ts * lint fix Qdrant_Upsert.ts * increment qdrant node version * increment qdrant upsert node version --------- Co-authored-by: Henry Heng <henryheng@flowiseai.com>
This commit is contained in:
parent
5775947586
commit
c4eb75ddde
|
|
@ -32,7 +32,7 @@ class Qdrant_VectorStores implements INode {
|
|||
constructor() {
|
||||
this.label = 'Qdrant'
|
||||
this.name = 'qdrant'
|
||||
this.version = 2.0
|
||||
this.version = 3.0
|
||||
this.type = 'Qdrant'
|
||||
this.icon = 'qdrant.png'
|
||||
this.category = 'Vector Stores'
|
||||
|
|
@ -86,6 +86,15 @@ class Qdrant_VectorStores implements INode {
|
|||
default: 1536,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Upsert Batch Size',
|
||||
name: 'batchSize',
|
||||
type: 'number',
|
||||
step: 1,
|
||||
description: 'Upsert in batches of size N',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Similarity',
|
||||
name: 'qdrantSimilarity',
|
||||
|
|
@ -159,6 +168,7 @@ class Qdrant_VectorStores implements INode {
|
|||
const qdrantSimilarity = nodeData.inputs?.qdrantSimilarity
|
||||
const qdrantVectorDimension = nodeData.inputs?.qdrantVectorDimension
|
||||
const recordManager = nodeData.inputs?.recordManager
|
||||
const _batchSize = nodeData.inputs?.batchSize
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData)
|
||||
|
|
@ -218,10 +228,21 @@ class Qdrant_VectorStores implements INode {
|
|||
}))
|
||||
|
||||
try {
|
||||
await client.upsert(collectionName, {
|
||||
wait: true,
|
||||
points
|
||||
})
|
||||
if (_batchSize) {
|
||||
const batchSize = parseInt(_batchSize, 10)
|
||||
for (let i = 0; i < points.length; i += batchSize) {
|
||||
const batchPoints = points.slice(i, i + batchSize)
|
||||
await client.upsert(collectionName, {
|
||||
wait: true,
|
||||
points: batchPoints
|
||||
})
|
||||
}
|
||||
} else {
|
||||
await client.upsert(collectionName, {
|
||||
wait: true,
|
||||
points
|
||||
})
|
||||
}
|
||||
} catch (e: any) {
|
||||
const error = new Error(`${e?.status ?? 'Undefined error code'} ${e?.message}: ${e?.data?.status?.error}`)
|
||||
throw error
|
||||
|
|
@ -257,7 +278,15 @@ class Qdrant_VectorStores implements INode {
|
|||
|
||||
return res
|
||||
} else {
|
||||
await QdrantVectorStore.fromDocuments(finalDocs, embeddings, dbConfig)
|
||||
if (_batchSize) {
|
||||
const batchSize = parseInt(_batchSize, 10)
|
||||
for (let i = 0; i < finalDocs.length; i += batchSize) {
|
||||
const batch = finalDocs.slice(i, i + batchSize)
|
||||
await QdrantVectorStore.fromDocuments(batch, embeddings, dbConfig)
|
||||
}
|
||||
} else {
|
||||
await QdrantVectorStore.fromDocuments(finalDocs, embeddings, dbConfig)
|
||||
}
|
||||
return { numAdded: finalDocs.length, addedDocs: finalDocs }
|
||||
}
|
||||
} catch (e) {
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ class QdrantUpsert_VectorStores implements INode {
|
|||
constructor() {
|
||||
this.label = 'Qdrant Upsert Document'
|
||||
this.name = 'qdrantUpsert'
|
||||
this.version = 2.0
|
||||
this.version = 3.0
|
||||
this.type = 'Qdrant'
|
||||
this.icon = 'qdrant.png'
|
||||
this.category = 'Vector Stores'
|
||||
|
|
@ -71,6 +71,15 @@ class QdrantUpsert_VectorStores implements INode {
|
|||
default: 1536,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Upsert Batch Size',
|
||||
name: 'batchSize',
|
||||
type: 'number',
|
||||
step: 1,
|
||||
description: 'Upsert in batches of size N',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Similarity',
|
||||
name: 'qdrantSimilarity',
|
||||
|
|
@ -132,6 +141,7 @@ class QdrantUpsert_VectorStores implements INode {
|
|||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const qdrantSimilarity = nodeData.inputs?.qdrantSimilarity
|
||||
const qdrantVectorDimension = nodeData.inputs?.qdrantVectorDimension
|
||||
const _batchSize = nodeData.inputs?.batchSize
|
||||
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
|
|
@ -174,16 +184,29 @@ class QdrantUpsert_VectorStores implements INode {
|
|||
retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : JSON.parse(queryFilter)
|
||||
}
|
||||
|
||||
const vectorStore = await QdrantVectorStore.fromDocuments(finalDocs, embeddings, dbConfig)
|
||||
let vectorStore: QdrantVectorStore | undefined = undefined
|
||||
if (_batchSize) {
|
||||
const batchSize = parseInt(_batchSize, 10)
|
||||
for (let i = 0; i < finalDocs.length; i += batchSize) {
|
||||
const batch = finalDocs.slice(i, i + batchSize)
|
||||
vectorStore = await QdrantVectorStore.fromDocuments(batch, embeddings, dbConfig)
|
||||
}
|
||||
} else {
|
||||
vectorStore = await QdrantVectorStore.fromDocuments(finalDocs, embeddings, dbConfig)
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(retrieverConfig)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
if (vectorStore === undefined) {
|
||||
throw new Error('No documents to upsert')
|
||||
} else {
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(retrieverConfig)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue