diff --git a/packages/components/nodes/chains/VectaraChain/VectaraChain.ts b/packages/components/nodes/chains/VectaraChain/VectaraChain.ts index 3799d062f..c80b354f7 100644 --- a/packages/components/nodes/chains/VectaraChain/VectaraChain.ts +++ b/packages/components/nodes/chains/VectaraChain/VectaraChain.ts @@ -69,22 +69,23 @@ class VectaraChain_Chains implements INode { options: [ { label: 'vectara-summary-ext-v1.2.0 (gpt-3.5-turbo)', - name: 'vectara-summary-ext-v1.2.0' + name: 'vectara-summary-ext-v1.2.0', + description: 'base summarizer, available to all Vectara users' }, { label: 'vectara-experimental-summary-ext-2023-10-23-small (gpt-3.5-turbo)', name: 'vectara-experimental-summary-ext-2023-10-23-small', - description: 'In beta, available to both Growth and Scale Vectara users' + description: `In beta, available to both Growth and Scale Vectara users` }, { label: 'vectara-summary-ext-v1.3.0 (gpt-4.0)', name: 'vectara-summary-ext-v1.3.0', - description: 'Only available to paying Scale Vectara users' + description: 'Only available to Scale Vectara users' }, { label: 'vectara-experimental-summary-ext-2023-10-23-med (gpt-4.0)', name: 'vectara-experimental-summary-ext-2023-10-23-med', - description: 'In beta, only available to paying Scale Vectara users' + description: `In beta, only available to Scale Vectara users` } ], default: 'vectara-summary-ext-v1.2.0' @@ -228,7 +229,7 @@ class VectaraChain_Chains implements INode { async run(nodeData: INodeData, input: string): Promise { const vectorStore = nodeData.inputs?.vectaraStore as VectaraStore - const responseLang = (nodeData.inputs?.responseLang as string) ?? 'auto' + const responseLang = (nodeData.inputs?.responseLang as string) ?? 'eng' const summarizerPromptName = nodeData.inputs?.summarizerPromptName as string const maxSummarizedResultsStr = nodeData.inputs?.maxSummarizedResults as string const maxSummarizedResults = maxSummarizedResultsStr ? 
parseInt(maxSummarizedResultsStr, 10) : 7 @@ -247,17 +248,28 @@ class VectaraChain_Chains implements INode { lexicalInterpolationConfig: { lambda: vectaraFilter?.lambda ?? 0.025 } })) + const mmrRerankerId = 272725718 // Vectara reranker ID for MMR const data = { query: [ { query: input, start: 0, - numResults: topK, + numResults: vectaraFilter?.mmrConfig?.mmrK > 0 ? vectaraFilter?.mmrConfig?.mmrK : topK, + corpusKey: corpusKeys, contextConfig: { sentencesAfter: vectaraFilter?.contextConfig?.sentencesAfter ?? 2, sentencesBefore: vectaraFilter?.contextConfig?.sentencesBefore ?? 2 }, - corpusKey: corpusKeys, + ...(vectaraFilter?.mmrConfig?.mmrK > 0 + ? { + rerankingConfig: { + rerankerId: mmrRerankerId, + mmrConfig: { + diversityBias: vectaraFilter?.mmrConfig.diversityBias + } + } + } + : {}), summary: [ { summarizerPromptName, @@ -285,6 +297,14 @@ class VectaraChain_Chains implements INode { const documents = result.responseSet[0].document let rawSummarizedText = '' + // remove responses that are not in the topK (in case of MMR) + // Note that this does not really matter functionally due to the reorder citations, but it is more efficient + const maxResponses = vectaraFilter?.mmrConfig?.mmrK > 0 ? Math.min(responses.length, topK) : responses.length + if (responses.length > maxResponses) { + responses.splice(maxResponses) + } + + // Add metadata to each text response given its corresponding document metadata for (let i = 0; i < responses.length; i += 1) { const responseMetadata = responses[i].metadata const documentMetadata = documents[responses[i].documentIndex].metadata @@ -301,13 +321,13 @@ class VectaraChain_Chains implements INode { responses[i].metadata = combinedMetadata } + // Create the summarization response const summaryStatus = result.responseSet[0].summary[0].status if (summaryStatus.length > 0 && summaryStatus[0].code === 'BAD_REQUEST') { throw new Error( `BAD REQUEST: Too much text for the summarizer to summarize. 
Please try reducing the number of search results to summarize, or the context of each result by adjusting the 'summary_num_sentences', and 'summary_num_results' parameters respectively.` ) } - if ( summaryStatus.length > 0 && summaryStatus[0].code === 'NOT_FOUND' && @@ -316,8 +336,8 @@ class VectaraChain_Chains implements INode { throw new Error(`BAD REQUEST: summarizer ${summarizerPromptName} is invalid for this account.`) } + // Reorder citations in summary and create the list of returned source documents rawSummarizedText = result.responseSet[0].summary[0]?.text - let summarizedText = reorderCitations(rawSummarizedText) let summaryResponses = applyCitationOrder(responses, rawSummarizedText) diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara.ts b/packages/components/nodes/vectorstores/Vectara/Vectara.ts index 7460c5864..98acf00c2 100644 --- a/packages/components/nodes/vectorstores/Vectara/Vectara.ts +++ b/packages/components/nodes/vectorstores/Vectara/Vectara.ts @@ -82,7 +82,9 @@ class Vectara_VectorStores implements INode { label: 'Lambda', name: 'lambda', description: - 'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.', + 'Enable hybrid search to improve retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.' + + 'A value of 0.0 means that only neural search is used, while a value of 1.0 means that only keyword-based search is used. Defaults to 0.0 (neural only).', + default: 0.0, type: 'number', additionalParams: true, optional: true @@ -90,8 +92,26 @@ class Vectara_VectorStores implements INode { { label: 'Top K', name: 'topK', - description: 'Number of top results to fetch. Defaults to 4', - placeholder: '4', + description: 'Number of top results to fetch. 
Defaults to 5', + placeholder: '5', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'MMR K', + name: 'mmrK', + description: 'Number of top results to fetch for MMR. Defaults to 50', + placeholder: '50', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'MMR diversity bias', + name: 'mmrDiversityBias', + description: 'The diversity bias to use for MMR. Defaults to 0.3', + placeholder: '0.3', type: 'number', additionalParams: true, optional: true @@ -191,7 +211,9 @@ class Vectara_VectorStores implements INode { const lambda = nodeData.inputs?.lambda as number const output = nodeData.outputs?.output as string const topK = nodeData.inputs?.topK as string - const k = topK ? parseFloat(topK) : 4 + const k = topK ? parseFloat(topK) : 5 + const mmrK = nodeData.inputs?.mmrK as number + const mmrDiversityBias = nodeData.inputs?.mmrDiversityBias as number const vectaraArgs: VectaraLibArgs = { apiKey: apiKey, @@ -208,6 +230,7 @@ class Vectara_VectorStores implements INode { if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter vectaraFilter.contextConfig = vectaraContextConfig + if (mmrK) vectaraFilter.mmrConfig = { mmrK: mmrK, diversityBias: mmrDiversityBias } const vectorStore = new VectaraStore(vectaraArgs)