added support for MMR

Ofer Mendelevitch 2023-12-31 18:36:23 -08:00 committed by Ilango
parent adfeb37e8b
commit b1b9b9fcff
2 changed files with 56 additions and 13 deletions
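The change wires Maximal Marginal Relevance (MMR) into the Vectara retrieval path: the Vectara vector store node gains MMR K and MMR diversity bias inputs, and the Vectara chain node over-fetches mmrK results, reranks them with Vectara's MMR reranker, and keeps only the top K for summarization and citations. The MMR settings travel on the vector store's filter object; below is a rough, non-authoritative sketch of the filter shape the diff relies on (the real VectaraFilter type lives in the vector store node and is not shown here):

// Sketch only: approximate shape of the filter fields this commit uses.
interface MMRConfig {
    mmrK: number // how many candidates to fetch and hand to the MMR reranker
    diversityBias: number // relevance-vs-diversity trade-off for the reranker
}

interface VectaraFilterSketch {
    lambda?: number // hybrid-search weight (see the Lambda input below)
    contextConfig?: { sentencesBefore?: number; sentencesAfter?: number }
    mmrConfig?: MMRConfig // set when the user supplies an MMR K value
}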


@@ -69,22 +69,23 @@ class VectaraChain_Chains implements INode {
             options: [
                 {
                     label: 'vectara-summary-ext-v1.2.0 (gpt-3.5-turbo)',
-                    name: 'vectara-summary-ext-v1.2.0'
+                    name: 'vectara-summary-ext-v1.2.0',
+                    description: 'base summarizer, available to all Vectara users'
                 },
                 {
                     label: 'vectara-experimental-summary-ext-2023-10-23-small (gpt-3.5-turbo)',
                     name: 'vectara-experimental-summary-ext-2023-10-23-small',
-                    description: 'In beta, available to both Growth and Scale Vectara users'
+                    description: `In beta, available to both Growth and <a target="_blank" href="https://vectara.com/pricing/">Scale</a> Vectara users`
                 },
                 {
                     label: 'vectara-summary-ext-v1.3.0 (gpt-4.0)',
                     name: 'vectara-summary-ext-v1.3.0',
-                    description: 'Only available to paying Scale Vectara users'
+                    description: 'Only available to <a target="_blank" href="https://vectara.com/pricing/">Scale</a> Vectara users'
                 },
                 {
                     label: 'vectara-experimental-summary-ext-2023-10-23-med (gpt-4.0)',
                     name: 'vectara-experimental-summary-ext-2023-10-23-med',
-                    description: 'In beta, only available to paying Scale Vectara users'
+                    description: `In beta, only available to <a target="_blank" href="https://vectara.com/pricing/">Scale</a> Vectara users`
                 }
             ],
             default: 'vectara-summary-ext-v1.2.0'
@@ -228,7 +229,7 @@ class VectaraChain_Chains implements INode {
     async run(nodeData: INodeData, input: string): Promise<object> {
         const vectorStore = nodeData.inputs?.vectaraStore as VectaraStore
-        const responseLang = (nodeData.inputs?.responseLang as string) ?? 'auto'
+        const responseLang = (nodeData.inputs?.responseLang as string) ?? 'eng'
         const summarizerPromptName = nodeData.inputs?.summarizerPromptName as string
         const maxSummarizedResultsStr = nodeData.inputs?.maxSummarizedResults as string
         const maxSummarizedResults = maxSummarizedResultsStr ? parseInt(maxSummarizedResultsStr, 10) : 7
@@ -247,17 +248,28 @@ class VectaraChain_Chains implements INode {
             lexicalInterpolationConfig: { lambda: vectaraFilter?.lambda ?? 0.025 }
         }))
+        const mmrRerankerId = 272725718 // Vectara reranker ID for MMR
         const data = {
             query: [
                 {
                     query: input,
                     start: 0,
-                    numResults: topK,
+                    numResults: vectaraFilter?.mmrConfig?.mmrK > 0 ? vectaraFilter?.mmrConfig?.mmrK : topK,
+                    corpusKey: corpusKeys,
                     contextConfig: {
                         sentencesAfter: vectaraFilter?.contextConfig?.sentencesAfter ?? 2,
                         sentencesBefore: vectaraFilter?.contextConfig?.sentencesBefore ?? 2
                     },
-                    corpusKey: corpusKeys,
+                    ...(vectaraFilter?.mmrConfig?.mmrK > 0
+                        ? {
+                              rerankingConfig: {
+                                  rerankerId: mmrRerankerId,
+                                  mmrConfig: {
+                                      diversityBias: vectaraFilter?.mmrConfig.diversityBias
+                                  }
+                              }
+                          }
+                        : {}),
                     summary: [
                         {
                             summarizerPromptName,
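The conditional spread above only attaches a rerankingConfig when MMR is requested. A minimal standalone sketch of that pattern follows; the query text, mmrK = 50, diversityBias = 0.3 and topK = 5 are illustrative values matching the node placeholders, not taken from a real request:

// Minimal sketch of the conditional MMR reranking config (illustrative values only).
const mmrRerankerId = 272725718 // Vectara reranker ID for MMR, as used above
const mmrConfig = { mmrK: 50, diversityBias: 0.3 } // hypothetical user settings
const topK = 5

const queryEntry = {
    query: 'example question',
    start: 0,
    // over-fetch mmrK candidates so the reranker has something to diversify
    numResults: mmrConfig.mmrK > 0 ? mmrConfig.mmrK : topK,
    ...(mmrConfig.mmrK > 0
        ? { rerankingConfig: { rerankerId: mmrRerankerId, mmrConfig: { diversityBias: mmrConfig.diversityBias } } }
        : {})
}

console.log(JSON.stringify(queryEntry, null, 2))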
@@ -285,6 +297,14 @@ class VectaraChain_Chains implements INode {
         const documents = result.responseSet[0].document
         let rawSummarizedText = ''
+        // remove responses that are not in the topK (in case of MMR)
+        // Note that this does not really matter functionally due to the citation reordering below, but it is more efficient
+        const maxResponses = vectaraFilter?.mmrConfig?.mmrK > 0 ? Math.min(responses.length, topK) : responses.length
+        if (responses.length > maxResponses) {
+            responses.splice(maxResponses)
+        }
+        // Add metadata to each text response given its corresponding document metadata
         for (let i = 0; i < responses.length; i += 1) {
             const responseMetadata = responses[i].metadata
             const documentMetadata = documents[responses[i].documentIndex].metadata
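Because the MMR query over-fetches mmrK results, only the top topK of the reranked list should reach the summarizer and the citation list. A small standalone illustration of that trimming step, using made-up data:

// Illustration only: trim an MMR-reranked result list down to the top K entries.
const topK = 2
const responses = [{ text: 'a' }, { text: 'b' }, { text: 'c' }, { text: 'd' }] // best-first order
const maxResponses = Math.min(responses.length, topK)
if (responses.length > maxResponses) {
    responses.splice(maxResponses) // remove everything after the first maxResponses entries
}
console.log(responses.map((r) => r.text)) // ['a', 'b']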
@@ -301,13 +321,13 @@ class VectaraChain_Chains implements INode {
             responses[i].metadata = combinedMetadata
         }
+        // Create the summarization response
         const summaryStatus = result.responseSet[0].summary[0].status
         if (summaryStatus.length > 0 && summaryStatus[0].code === 'BAD_REQUEST') {
             throw new Error(
                 `BAD REQUEST: Too much text for the summarizer to summarize. Please try reducing the number of search results to summarize, or the context of each result by adjusting the 'summary_num_sentences', and 'summary_num_results' parameters respectively.`
             )
         }
         if (
             summaryStatus.length > 0 &&
             summaryStatus[0].code === 'NOT_FOUND' &&
@@ -316,8 +336,8 @@ class VectaraChain_Chains implements INode {
             throw new Error(`BAD REQUEST: summarizer ${summarizerPromptName} is invalid for this account.`)
         }
+        // Reorder citations in summary and create the list of returned source documents
         rawSummarizedText = result.responseSet[0].summary[0]?.text
         let summarizedText = reorderCitations(rawSummarizedText)
         let summaryResponses = applyCitationOrder(responses, rawSummarizedText)


@@ -82,7 +82,9 @@ class Vectara_VectorStores implements INode {
                 label: 'Lambda',
                 name: 'lambda',
                 description:
-                    'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.',
+                    'Enable hybrid search to improve retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors. ' +
+                    'A value of 0.0 means that only neural search is used, while a value of 1.0 means that only keyword-based search is used. Defaults to 0.0 (neural only).',
+                default: 0.0,
                 type: 'number',
                 additionalParams: true,
                 optional: true
@@ -90,8 +92,26 @@ class Vectara_VectorStores implements INode {
             {
                 label: 'Top K',
                 name: 'topK',
-                description: 'Number of top results to fetch. Defaults to 4',
-                placeholder: '4',
+                description: 'Number of top results to fetch. Defaults to 5',
+                placeholder: '5',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            },
+            {
+                label: 'MMR K',
+                name: 'mmrK',
+                description: 'Number of top results to fetch for MMR. Defaults to 50',
+                placeholder: '50',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            },
+            {
+                label: 'MMR diversity bias',
+                name: 'mmrDiversityBias',
+                description: 'The diversity bias to use for MMR. Defaults to 0.3',
+                placeholder: '0.3',
                 type: 'number',
                 additionalParams: true,
                 optional: true
@@ -191,7 +211,9 @@ class Vectara_VectorStores implements INode {
         const lambda = nodeData.inputs?.lambda as number
         const output = nodeData.outputs?.output as string
         const topK = nodeData.inputs?.topK as string
-        const k = topK ? parseFloat(topK) : 4
+        const k = topK ? parseFloat(topK) : 5
+        const mmrK = nodeData.inputs?.mmrK as number
+        const mmrDiversityBias = nodeData.inputs?.mmrDiversityBias as number
         const vectaraArgs: VectaraLibArgs = {
             apiKey: apiKey,
@@ -208,6 +230,7 @@ class Vectara_VectorStores implements INode {
         if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore
         if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter
         vectaraFilter.contextConfig = vectaraContextConfig
+        if (mmrK) vectaraFilter.mmrConfig = { mmrK: mmrK, diversityBias: mmrDiversityBias }
         const vectorStore = new VectaraStore(vectaraArgs)
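Putting the two new inputs together, the vector store node builds the MMR filter that the chain node consumes. A hedged end-to-end sketch with hypothetical input values (Top K = 5, MMR K = 50, MMR diversity bias = 0.3, matching the placeholders above):

// Sketch of how the new inputs flow into the Vectara filter (hypothetical values).
const k = 5 // Top K
const mmrK = 50 // MMR K
const mmrDiversityBias = 0.3 // MMR diversity bias

const vectaraFilter: { mmrConfig?: { mmrK: number; diversityBias: number } } = {}
if (mmrK) vectaraFilter.mmrConfig = { mmrK: mmrK, diversityBias: mmrDiversityBias }

// The chain node then requests mmrK results, applies Vectara's MMR reranker,
// and keeps only the top k of the reranked results for summarization and citations.
console.log({ k, vectaraFilter }) // { k: 5, vectaraFilter: { mmrConfig: { mmrK: 50, diversityBias: 0.3 } } }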