Merge pull request #292 from FlowiseAI/feature/TopK

Feature/Add top K to vector stores

This commit is contained in:
commit e9184ede0c
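Every vector-store node in this PR follows the same Top K pattern: expose an optional topK number input (default 4), parse it, pass it to asRetriever(k) when the node outputs a retriever, and tag the store with a k property when it outputs the raw vector store so downstream chains (VectorDBQAChain, MultiRetrievalQAChain, BabyAGI) can read it. The sketch below only illustrates that wiring; the SimpleNodeData shape and the resolveTopK/applyTopK helpers are illustrative stand-ins rather than code from this PR, and the langchain import path is assumed to match the one the nodes already use.

import { VectorStore } from 'langchain/vectorstores/base'

// Illustrative input shape; the real nodes receive Flowise's INodeData.
interface SimpleNodeData {
    inputs?: { topK?: string }
    outputs?: { output?: string }
}

// Parse the optional Top K string input, defaulting to 4 (same logic as the nodes below).
function resolveTopK(nodeData: SimpleNodeData): number {
    const topK = nodeData.inputs?.topK
    return topK ? parseInt(topK, 10) : 4
}

// Either cap the retriever at k results, or stash k on the store so a chain
// can later read (vectorStore as any).k ?? 4.
function applyTopK(vectorStore: VectorStore, nodeData: SimpleNodeData) {
    const k = resolveTopK(nodeData)
    if (nodeData.outputs?.output === 'retriever') {
        return vectorStore.asRetriever(k)
    }
    ;(vectorStore as any).k = k
    return vectorStore
}
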
@@ -45,8 +45,9 @@ class BabyAGI_Agents implements INode {
         const model = nodeData.inputs?.model as BaseChatModel
         const vectorStore = nodeData.inputs?.vectorStore as VectorStore
         const taskLoop = nodeData.inputs?.taskLoop as string
+        const k = (vectorStore as any)?.k ?? 4

-        const babyAgi = BabyAGI.fromLLM(model, vectorStore, parseInt(taskLoop, 10))
+        const babyAgi = BabyAGI.fromLLM(model, vectorStore, parseInt(taskLoop, 10), k)
         return babyAgi
     }

@@ -154,18 +154,22 @@ export class BabyAGI {
     maxIterations = 3

+    topK = 4
+
     constructor(
         taskCreationChain: TaskCreationChain,
         taskPrioritizationChain: TaskPrioritizationChain,
         executionChain: ExecutionChain,
         vectorStore: VectorStore,
-        maxIterations: number
+        maxIterations: number,
+        topK: number
     ) {
         this.taskCreationChain = taskCreationChain
         this.taskPrioritizationChain = taskPrioritizationChain
         this.executionChain = executionChain
         this.vectorStore = vectorStore
         this.maxIterations = maxIterations
+        this.topK = topK
     }

     addTask(task: Task) {

@@ -219,7 +223,7 @@ export class BabyAGI {
             this.printNextTask(task)

             // Step 2: Execute the task
-            const result = await executeTask(this.vectorStore, this.executionChain, objective, task.task_name)
+            const result = await executeTask(this.vectorStore, this.executionChain, objective, task.task_name, this.topK)
             const thisTaskId = task.task_id
             finalResult = result
             this.printTaskResult(result)

@@ -257,10 +261,10 @@ export class BabyAGI {
         return finalResult
     }

-    static fromLLM(llm: BaseChatModel, vectorstore: VectorStore, maxIterations = 3): BabyAGI {
+    static fromLLM(llm: BaseChatModel, vectorstore: VectorStore, maxIterations = 3, topK = 4): BabyAGI {
         const taskCreationChain = TaskCreationChain.from_llm(llm)
         const taskPrioritizationChain = TaskPrioritizationChain.from_llm(llm)
         const executionChain = ExecutionChain.from_llm(llm)
-        return new BabyAGI(taskCreationChain, taskPrioritizationChain, executionChain, vectorstore, maxIterations)
+        return new BabyAGI(taskCreationChain, taskPrioritizationChain, executionChain, vectorstore, maxIterations, topK)
     }
 }

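For context, a hedged usage sketch of the widened factory signature above; the model, vectorStore, and taskLoop values come from the node code earlier in this diff:

// topK defaults to 4 when the node input is not set; it is threaded through the
// constructor and into executeTask(..., this.topK) as shown above.
const k = (vectorStore as any)?.k ?? 4
const babyAgi = BabyAGI.fromLLM(model, vectorStore, parseInt(taskLoop, 10), k)
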
@@ -46,7 +46,7 @@ class MultiRetrievalQAChain_Chains implements INode {
         for (const vs of vectorStoreRetriever) {
             retrieverNames.push(vs.name)
             retrieverDescriptions.push(vs.description)
-            retrievers.push(vs.vectorStore.asRetriever())
+            retrievers.push(vs.vectorStore.asRetriever((vs.vectorStore as any).k ?? 4))
         }

         const chain = MultiRetrievalQAChain.fromRetrievers(model, retrieverNames, retrieverDescriptions, retrievers, undefined, {

@@ -40,7 +40,10 @@ class VectorDBQAChain_Chains implements INode {
         const model = nodeData.inputs?.model as BaseLanguageModel
         const vectorStore = nodeData.inputs?.vectorStore as VectorStore

-        const chain = VectorDBQAChain.fromLLM(model, vectorStore, { verbose: process.env.DEBUG === 'true' ? true : false })
+        const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
+            k: (vectorStore as any)?.k ?? 4,
+            verbose: process.env.DEBUG === 'true' ? true : false
+        })
         return chain
     }

@@ -38,6 +38,15 @@ class Chroma_Existing_VectorStores implements INode {
                 name: 'chromaURL',
                 type: 'string',
                 optional: true
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -59,6 +68,8 @@ class Chroma_Existing_VectorStores implements INode {
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const chromaURL = nodeData.inputs?.chromaURL as string
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const obj: {
             collectionName: string

@@ -69,9 +80,10 @@ class Chroma_Existing_VectorStores implements INode {
         const vectorStore = await Chroma.fromExistingCollection(embeddings, obj)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -46,6 +46,15 @@ class ChromaUpsert_VectorStores implements INode {
                 name: 'chromaURL',
                 type: 'string',
                 optional: true
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -68,6 +77,8 @@ class ChromaUpsert_VectorStores implements INode {
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const chromaURL = nodeData.inputs?.chromaURL as string
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []

@@ -84,9 +95,10 @@ class ChromaUpsert_VectorStores implements INode {
         const vectorStore = await Chroma.fromDocuments(finalDocs, embeddings, obj)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -34,6 +34,15 @@ class Faiss_Existing_VectorStores implements INode {
                 description: 'Path to load faiss.index file',
                 placeholder: `C:\\Users\\User\\Desktop`,
                 type: 'string'
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -54,13 +63,16 @@ class Faiss_Existing_VectorStores implements INode {
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const basePath = nodeData.inputs?.basePath as string
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const vectorStore = await FaissStore.load(basePath, embeddings)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -42,6 +42,15 @@ class FaissUpsert_VectorStores implements INode {
                 description: 'Path to store faiss.index file',
                 placeholder: `C:\\Users\\User\\Desktop`,
                 type: 'string'
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -63,6 +72,8 @@ class FaissUpsert_VectorStores implements INode {
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const output = nodeData.outputs?.output as string
         const basePath = nodeData.inputs?.basePath as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []

@@ -74,9 +85,10 @@ class FaissUpsert_VectorStores implements INode {
         await vectorStore.save(basePath)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -35,6 +35,14 @@ class InMemoryVectorStore_VectorStores implements INode {
                 label: 'Embeddings',
                 name: 'embeddings',
                 type: 'Embeddings'
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                optional: true
+            }
         ]
         this.outputs = [

@@ -55,6 +63,8 @@ class InMemoryVectorStore_VectorStores implements INode {
         const docs = nodeData.inputs?.document as Document[]
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []

@@ -65,9 +75,10 @@ class InMemoryVectorStore_VectorStores implements INode {
         const vectorStore = await MemoryVectorStore.fromDocuments(finalDocs, embeddings)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -49,6 +49,7 @@ class Pinecone_Existing_VectorStores implements INode {
                 name: 'pineconeNamespace',
                 type: 'string',
                 placeholder: 'my-first-namespace',
+                additionalParams: true,
                 optional: true
             },
             {

@@ -57,6 +58,15 @@ class Pinecone_Existing_VectorStores implements INode {
                 type: 'json',
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -79,9 +89,10 @@ class Pinecone_Existing_VectorStores implements INode {
         const index = nodeData.inputs?.pineconeIndex as string
         const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string
         const pineconeMetadataFilter = nodeData.inputs?.pineconeMetadataFilter

         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const client = new PineconeClient()
         await client.init({

@@ -104,9 +115,10 @@ class Pinecone_Existing_VectorStores implements INode {
         const vectorStore = await PineconeStore.fromExistingIndex(embeddings, obj)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -57,6 +57,16 @@ class PineconeUpsert_VectorStores implements INode {
                 name: 'pineconeNamespace',
                 type: 'string',
                 placeholder: 'my-first-namespace',
+                additionalParams: true,
                 optional: true
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]

@@ -82,6 +92,8 @@ class PineconeUpsert_VectorStores implements INode {
         const docs = nodeData.inputs?.document as Document[]
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const client = new PineconeClient()
         await client.init({

@@ -106,9 +118,10 @@ class PineconeUpsert_VectorStores implements INode {
         const vectorStore = await PineconeStore.fromDocuments(finalDocs, embeddings, obj)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -55,6 +55,15 @@ class Supabase_Existing_VectorStores implements INode {
                 type: 'json',
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -79,6 +88,8 @@ class Supabase_Existing_VectorStores implements INode {
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const supabaseMetadataFilter = nodeData.inputs?.supabaseMetadataFilter
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const client = createClient(supabaseProjUrl, supabaseApiKey)

@@ -96,9 +107,10 @@ class Supabase_Existing_VectorStores implements INode {
         const vectorStore = await SupabaseVectorStore.fromExistingIndex(embeddings, obj)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -56,6 +56,15 @@ class SupabaseUpsert_VectorStores implements INode {
                 label: 'Query Name',
                 name: 'queryName',
                 type: 'string'
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -80,6 +89,8 @@ class SupabaseUpsert_VectorStores implements INode {
         const docs = nodeData.inputs?.document as Document[]
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const client = createClient(supabaseProjUrl, supabaseApiKey)

@@ -96,9 +107,10 @@ class SupabaseUpsert_VectorStores implements INode {
         })

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -79,6 +79,15 @@ class Weaviate_Existing_VectorStores implements INode {
                 placeholder: `["foo"]`,
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -102,9 +111,10 @@ class Weaviate_Existing_VectorStores implements INode {
         const weaviateApiKey = nodeData.inputs?.weaviateApiKey as string
         const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string
         const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string

         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const clientConfig: any = {
             scheme: weaviateScheme,

@@ -125,9 +135,10 @@ class Weaviate_Existing_VectorStores implements INode {
         const vectorStore = await WeaviateStore.fromExistingIndex(embeddings, obj)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

@@ -87,6 +87,15 @@ class WeaviateUpsert_VectorStores implements INode {
                 placeholder: `["foo"]`,
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'Top K',
+                name: 'topK',
+                description: 'Number of top results to fetch. Default to 4',
+                placeholder: '4',
+                type: 'number',
+                additionalParams: true,
+                optional: true
+            }
         ]
         this.outputs = [

@@ -110,10 +119,11 @@ class WeaviateUpsert_VectorStores implements INode {
         const weaviateApiKey = nodeData.inputs?.weaviateApiKey as string
         const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string
         const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string

         const docs = nodeData.inputs?.document as Document[]
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const output = nodeData.outputs?.output as string
+        const topK = nodeData.inputs?.topK as string
+        const k = topK ? parseInt(topK, 10) : 4

         const clientConfig: any = {
             scheme: weaviateScheme,

@@ -140,9 +150,10 @@ class WeaviateUpsert_VectorStores implements INode {
         const vectorStore = await WeaviateStore.fromDocuments(finalDocs, embeddings, obj)

         if (output === 'retriever') {
-            const retriever = vectorStore.asRetriever()
+            const retriever = vectorStore.asRetriever(k)
             return retriever
         } else if (output === 'vectorStore') {
+            ;(vectorStore as any).k = k
             return vectorStore
         }
         return vectorStore

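The remaining hunks only update the bundled marketplace chatflow JSON files: the new BasePath and Top K input params are recorded on the serialized nodes, node heights and positions shift accordingly, and the agent flow swaps its VectorDB QA Chain nodes for Retrieval QA Chain nodes fed by retrievers. On the consuming side, the effect of asRetriever(k) is simply a cap on how many documents a similarity search returns; a small hedged sketch (the query text is made up):

// A retriever built with k = 2 returns at most two documents per query;
// getRelevantDocuments is the standard langchain retriever call used by the QA chains above.
const retriever = vectorStore.asRetriever(2)
const docs = await retriever.getRelevantDocuments('What does the state of the union say about inflation?')
console.log(docs.length) // <= 2
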
@@ -346,6 +346,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "chatOpenAI_0-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "chatOpenAI_0-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -533,6 +541,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "chatOpenAI_1-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "chatOpenAI_1-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -664,6 +680,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "chatOpenAI_2-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "chatOpenAI_2-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -276,6 +276,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "openAI_1-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "openAI_1-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -192,6 +192,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "chatOpenAI_1-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "chatOpenAI_1-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -412,6 +420,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "openAIEmbeddings_0-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "openAIEmbeddings_0-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -480,6 +496,7 @@
            "type": "string",
            "placeholder": "my-first-namespace",
            "optional": true,
+            "additionalParams": true,
            "id": "pineconeExistingIndex_1-input-pineconeNamespace-string"
        },
        {

@@ -489,6 +506,16 @@
            "optional": true,
            "additionalParams": true,
            "id": "pineconeExistingIndex_1-input-pineconeMetadataFilter-json"
        },
+        {
+            "label": "Top K",
+            "name": "topK",
+            "description": "Number of top results to fetch. Default to 4",
+            "placeholder": "4",
+            "type": "number",
+            "additionalParams": true,
+            "optional": true,
+            "id": "pineconeExistingIndex_1-input-topK-number"
+        }
    ],
    "inputAnchors": [

@@ -48,6 +48,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "openAIEmbeddings_1-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "openAIEmbeddings_1-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -116,6 +124,7 @@
            "type": "string",
            "placeholder": "my-first-namespace",
            "optional": true,
+            "additionalParams": true,
            "id": "pineconeExistingIndex_1-input-pineconeNamespace-string"
        },
        {

@@ -125,6 +134,16 @@
            "optional": true,
            "additionalParams": true,
            "id": "pineconeExistingIndex_1-input-pineconeMetadataFilter-json"
        },
+        {
+            "label": "Top K",
+            "name": "topK",
+            "description": "Number of top results to fetch. Default to 4",
+            "placeholder": "4",
+            "type": "number",
+            "additionalParams": true,
+            "optional": true,
+            "id": "pineconeExistingIndex_1-input-topK-number"
+        }
    ],
    "inputAnchors": [

@@ -276,6 +295,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "chatOpenAI_1-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "chatOpenAI_1-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -355,6 +355,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "chatOpenAI_0-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "chatOpenAI_0-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@@ -102,6 +102,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "chatOpenAI_1-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "chatOpenAI_1-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@ -169,6 +169,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_1-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_1-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -237,7 +245,18 @@
|
|||
"type": "string",
|
||||
"placeholder": "my-first-namespace",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeUpsert_1-input-pineconeNamespace-string"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "pineconeUpsert_1-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
@ -397,6 +416,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "chatOpenAI_0-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "chatOpenAI_0-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -445,6 +472,13 @@
|
|||
"category": "Chains",
|
||||
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
|
||||
"inputParams": [
|
||||
{
|
||||
"label": "Return Source Documents",
|
||||
"name": "returnSourceDocuments",
|
||||
"type": "boolean",
|
||||
"optional": true,
|
||||
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
|
||||
},
|
||||
{
|
||||
"label": "System Message",
|
||||
"name": "systemMessagePrompt",
|
||||
|
|
@ -454,6 +488,31 @@
|
|||
"optional": true,
|
||||
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
|
||||
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
|
||||
},
|
||||
{
|
||||
"label": "Chain Option",
|
||||
"name": "chainOption",
|
||||
"type": "options",
|
||||
"options": [
|
||||
{
|
||||
"label": "MapReduceDocumentsChain",
|
||||
"name": "map_reduce",
|
||||
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
|
||||
},
|
||||
{
|
||||
"label": "RefineDocumentsChain",
|
||||
"name": "refine",
|
||||
"description": "Suitable for QA tasks over a large number of documents."
|
||||
},
|
||||
{
|
||||
"label": "StuffDocumentsChain",
|
||||
"name": "stuff",
|
||||
"description": "Suitable for QA tasks over a small number of documents."
|
||||
}
|
||||
],
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
|
|||
|
|
@ -186,6 +186,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_1-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_1-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -254,7 +262,18 @@
|
|||
"type": "string",
|
||||
"placeholder": "my-first-namespace",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeUpsert_1-input-pineconeNamespace-string"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "pineconeUpsert_1-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
@ -414,6 +433,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "chatOpenAI_0-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "chatOpenAI_0-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -462,6 +489,13 @@
|
|||
"category": "Chains",
|
||||
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
|
||||
"inputParams": [
|
||||
{
|
||||
"label": "Return Source Documents",
|
||||
"name": "returnSourceDocuments",
|
||||
"type": "boolean",
|
||||
"optional": true,
|
||||
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
|
||||
},
|
||||
{
|
||||
"label": "System Message",
|
||||
"name": "systemMessagePrompt",
|
||||
|
|
@ -471,6 +505,31 @@
|
|||
"optional": true,
|
||||
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
|
||||
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
|
||||
},
|
||||
{
|
||||
"label": "Chain Option",
|
||||
"name": "chainOption",
|
||||
"type": "options",
|
||||
"options": [
|
||||
{
|
||||
"label": "MapReduceDocumentsChain",
|
||||
"name": "map_reduce",
|
||||
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
|
||||
},
|
||||
{
|
||||
"label": "RefineDocumentsChain",
|
||||
"name": "refine",
|
||||
"description": "Suitable for QA tasks over a large number of documents."
|
||||
},
|
||||
{
|
||||
"label": "StuffDocumentsChain",
|
||||
"name": "stuff",
|
||||
"description": "Suitable for QA tasks over a small number of documents."
|
||||
}
|
||||
],
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
|
|||
|
|
@@ -197,6 +197,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "openAI_1-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "openAI_1-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@ -114,6 +114,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_1-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_1-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -193,6 +201,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_1-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_1-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -261,6 +277,7 @@
|
|||
"type": "string",
|
||||
"placeholder": "my-first-namespace",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeExistingIndex_0-input-pineconeNamespace-string"
|
||||
},
|
||||
{
|
||||
|
|
@ -270,6 +287,16 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "pineconeExistingIndex_0-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
@ -339,6 +366,13 @@
|
|||
"category": "Chains",
|
||||
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
|
||||
"inputParams": [
|
||||
{
|
||||
"label": "Return Source Documents",
|
||||
"name": "returnSourceDocuments",
|
||||
"type": "boolean",
|
||||
"optional": true,
|
||||
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
|
||||
},
|
||||
{
|
||||
"label": "System Message",
|
||||
"name": "systemMessagePrompt",
|
||||
|
|
@ -348,6 +382,31 @@
|
|||
"optional": true,
|
||||
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
|
||||
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
|
||||
},
|
||||
{
|
||||
"label": "Chain Option",
|
||||
"name": "chainOption",
|
||||
"type": "options",
|
||||
"options": [
|
||||
{
|
||||
"label": "MapReduceDocumentsChain",
|
||||
"name": "map_reduce",
|
||||
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
|
||||
},
|
||||
{
|
||||
"label": "RefineDocumentsChain",
|
||||
"name": "refine",
|
||||
"description": "Suitable for QA tasks over a large number of documents."
|
||||
},
|
||||
{
|
||||
"label": "StuffDocumentsChain",
|
||||
"name": "stuff",
|
||||
"description": "Suitable for QA tasks over a small number of documents."
|
||||
}
|
||||
],
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
|
|||
|
|
@ -171,6 +171,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_1-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_1-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -250,6 +258,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_1-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_1-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -466,7 +482,18 @@
|
|||
"type": "string",
|
||||
"placeholder": "my-first-namespace",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeUpsert_0-input-pineconeNamespace-string"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "pineconeUpsert_0-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
@ -543,6 +570,13 @@
|
|||
"category": "Chains",
|
||||
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
|
||||
"inputParams": [
|
||||
{
|
||||
"label": "Return Source Documents",
|
||||
"name": "returnSourceDocuments",
|
||||
"type": "boolean",
|
||||
"optional": true,
|
||||
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
|
||||
},
|
||||
{
|
||||
"label": "System Message",
|
||||
"name": "systemMessagePrompt",
|
||||
|
|
@ -552,6 +586,31 @@
|
|||
"optional": true,
|
||||
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
|
||||
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
|
||||
},
|
||||
{
|
||||
"label": "Chain Option",
|
||||
"name": "chainOption",
|
||||
"type": "options",
|
||||
"options": [
|
||||
{
|
||||
"label": "MapReduceDocumentsChain",
|
||||
"name": "map_reduce",
|
||||
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
|
||||
},
|
||||
{
|
||||
"label": "RefineDocumentsChain",
|
||||
"name": "refine",
|
||||
"description": "Suitable for QA tasks over a large number of documents."
|
||||
},
|
||||
{
|
||||
"label": "StuffDocumentsChain",
|
||||
"name": "stuff",
|
||||
"description": "Suitable for QA tasks over a small number of documents."
|
||||
}
|
||||
],
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
|
|||
|
|
@@ -362,6 +362,14 @@
            "optional": true,
            "additionalParams": true,
            "id": "chatOpenAI_0-input-timeout-number"
        },
+        {
+            "label": "BasePath",
+            "name": "basepath",
+            "type": "string",
+            "optional": true,
+            "additionalParams": true,
+            "id": "chatOpenAI_0-input-basepath-string"
+        }
    ],
    "inputAnchors": [],

@ -3,7 +3,7 @@
|
|||
"nodes": [
|
||||
{
|
||||
"width": 300,
|
||||
"height": 505,
|
||||
"height": 504,
|
||||
"id": "vectorStoreRetriever_0",
|
||||
"position": {
|
||||
"x": 712.9322670298264,
|
||||
|
|
@ -69,7 +69,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 280,
|
||||
"height": 279,
|
||||
"id": "multiRetrievalQAChain_0",
|
||||
"position": {
|
||||
"x": 1563.0150452201099,
|
||||
|
|
@ -128,7 +128,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 505,
|
||||
"height": 504,
|
||||
"id": "vectorStoreRetriever_1",
|
||||
"position": {
|
||||
"x": 711.4902931206071,
|
||||
|
|
@ -194,7 +194,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 505,
|
||||
"height": 504,
|
||||
"id": "vectorStoreRetriever_2",
|
||||
"position": {
|
||||
"x": 706.0716220151372,
|
||||
|
|
@ -260,7 +260,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 524,
|
||||
"height": 523,
|
||||
"id": "chatOpenAI_0",
|
||||
"position": {
|
||||
"x": 1206.027762600755,
|
||||
|
|
@ -359,6 +359,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "chatOpenAI_0-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "chatOpenAI_0-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -391,7 +399,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 330,
|
||||
"height": 329,
|
||||
"id": "openAIEmbeddings_0",
|
||||
"position": {
|
||||
"x": -254.88737984323413,
|
||||
|
|
@ -436,6 +444,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_0-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_0-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -464,7 +480,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 703,
|
||||
"height": 603,
|
||||
"id": "pineconeExistingIndex_0",
|
||||
"position": {
|
||||
"x": 271.2513182410521,
|
||||
|
|
@ -504,6 +520,7 @@
|
|||
"type": "string",
|
||||
"placeholder": "my-first-namespace",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeExistingIndex_0-input-pineconeNamespace-string"
|
||||
},
|
||||
{
|
||||
|
|
@ -513,6 +530,16 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "pineconeExistingIndex_0-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
@ -566,11 +593,11 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 454,
|
||||
"height": 505,
|
||||
"id": "chromaExistingIndex_0",
|
||||
"position": {
|
||||
"x": 274.1430731555137,
|
||||
"y": 335.15344698725556
|
||||
"x": 269.2940530300552,
|
||||
"y": 262.41814510537796
|
||||
},
|
||||
"type": "customNode",
|
||||
"data": {
|
||||
|
|
@ -594,6 +621,16 @@
|
|||
"type": "string",
|
||||
"optional": true,
|
||||
"id": "chromaExistingIndex_0-input-chromaURL-string"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "chromaExistingIndex_0-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
@ -638,14 +675,14 @@
|
|||
},
|
||||
"selected": false,
|
||||
"positionAbsolute": {
|
||||
"x": 274.1430731555137,
|
||||
"y": 335.15344698725556
|
||||
"x": 269.2940530300552,
|
||||
"y": 262.41814510537796
|
||||
},
|
||||
"dragging": false
|
||||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 703,
|
||||
"height": 702,
|
||||
"id": "supabaseExistingIndex_0",
|
||||
"position": {
|
||||
"x": 273.7097153973373,
|
||||
|
|
@ -692,6 +729,16 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "supabaseExistingIndex_0-input-supabaseMetadataFilter-json"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "supabaseExistingIndex_0-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
"nodes": [
|
||||
{
|
||||
"width": 300,
|
||||
"height": 330,
|
||||
"height": 329,
|
||||
"id": "openAIEmbeddings_2",
|
||||
"position": {
|
||||
"x": 155.07832615625986,
|
||||
|
|
@ -48,6 +48,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_2-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_2-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -76,11 +84,11 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 355,
|
||||
"height": 505,
|
||||
"id": "chromaExistingIndex_1",
|
||||
"position": {
|
||||
"x": 522.8177328694987,
|
||||
"y": -548.8355398674973
|
||||
"y": -723.8834555183237
|
||||
},
|
||||
"type": "customNode",
|
||||
"data": {
|
||||
|
|
@ -97,6 +105,23 @@
|
|||
"name": "collectionName",
|
||||
"type": "string",
|
||||
"id": "chromaExistingIndex_1-input-collectionName-string"
|
||||
},
|
||||
{
|
||||
"label": "Chroma URL",
|
||||
"name": "chromaURL",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"id": "chromaExistingIndex_1-input-chromaURL-string"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "chromaExistingIndex_1-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
@ -134,24 +159,24 @@
|
|||
}
|
||||
],
|
||||
"outputs": {
|
||||
"output": "vectorStore"
|
||||
"output": "retriever"
|
||||
},
|
||||
"selected": false
|
||||
},
|
||||
"positionAbsolute": {
|
||||
"x": 522.8177328694987,
|
||||
"y": -548.8355398674973
|
||||
"y": -723.8834555183237
|
||||
},
|
||||
"selected": false,
|
||||
"dragging": false
|
||||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 524,
|
||||
"height": 523,
|
||||
"id": "openAI_3",
|
||||
"position": {
|
||||
"x": 512.7434966474709,
|
||||
"y": -1107.9938317347255
|
||||
"x": 527.7101375911075,
|
||||
"y": -1290.6752949922043
|
||||
},
|
||||
"type": "customNode",
|
||||
"data": {
|
||||
|
|
@ -258,6 +283,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_3-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_3-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -284,69 +317,15 @@
|
|||
"selected": false
|
||||
},
|
||||
"positionAbsolute": {
|
||||
"x": 512.7434966474709,
|
||||
"y": -1107.9938317347255
|
||||
"x": 527.7101375911075,
|
||||
"y": -1290.6752949922043
|
||||
},
|
||||
"selected": false,
|
||||
"dragging": false
|
||||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 280,
|
||||
"id": "vectorDBQAChain_2",
|
||||
"position": {
|
||||
"x": 880.7795222381183,
|
||||
"y": -823.6550506138045
|
||||
},
|
||||
"type": "customNode",
|
||||
"data": {
|
||||
"id": "vectorDBQAChain_2",
|
||||
"label": "VectorDB QA Chain",
|
||||
"name": "vectorDBQAChain",
|
||||
"type": "VectorDBQAChain",
|
||||
"baseClasses": ["VectorDBQAChain", "BaseChain", "BaseLangChain"],
|
||||
"category": "Chains",
|
||||
"description": "QA chain for vector databases",
|
||||
"inputParams": [],
|
||||
"inputAnchors": [
|
||||
{
|
||||
"label": "Language Model",
|
||||
"name": "model",
|
||||
"type": "BaseLanguageModel",
|
||||
"id": "vectorDBQAChain_2-input-model-BaseLanguageModel"
|
||||
},
|
||||
{
|
||||
"label": "Vector Store",
|
||||
"name": "vectorStore",
|
||||
"type": "VectorStore",
|
||||
"id": "vectorDBQAChain_2-input-vectorStore-VectorStore"
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"model": "{{openAI_3.data.instance}}",
|
||||
"vectorStore": "{{chromaExistingIndex_1.data.instance}}"
|
||||
},
|
||||
"outputAnchors": [
|
||||
{
|
||||
"id": "vectorDBQAChain_2-output-vectorDBQAChain-VectorDBQAChain|BaseChain|BaseLangChain",
|
||||
"name": "vectorDBQAChain",
|
||||
"label": "VectorDBQAChain",
|
||||
"type": "VectorDBQAChain | BaseChain | BaseLangChain"
|
||||
}
|
||||
],
|
||||
"outputs": {},
|
||||
"selected": false
|
||||
},
|
||||
"positionAbsolute": {
|
||||
"x": 880.7795222381183,
|
||||
"y": -823.6550506138045
|
||||
},
|
||||
"selected": false,
|
||||
"dragging": false
|
||||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 602,
|
||||
"height": 601,
|
||||
"id": "chainTool_2",
|
||||
"position": {
|
||||
"x": 1251.240972921597,
|
||||
|
|
@ -397,7 +376,7 @@
|
|||
"name": "ai-paper-qa",
|
||||
"description": "AI Paper QA - useful for when you need to ask questions about the AI-Generated Content paper.",
|
||||
"returnDirect": "",
|
||||
"baseChain": "{{vectorDBQAChain_2.data.instance}}"
|
||||
"baseChain": "{{retrievalQAChain_0.data.instance}}"
|
||||
},
|
||||
"outputAnchors": [
|
||||
{
|
||||
|
|
@ -419,7 +398,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 143,
|
||||
"height": 142,
|
||||
"id": "calculator_1",
|
||||
"position": {
|
||||
"x": 1649.5389102641816,
|
||||
|
|
@ -457,7 +436,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 278,
|
||||
"height": 277,
|
||||
"id": "serpAPI_0",
|
||||
"position": {
|
||||
"x": 1654.5273488033688,
|
||||
|
|
@ -502,7 +481,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 330,
|
||||
"height": 329,
|
||||
"id": "openAIEmbeddings_3",
|
||||
"position": {
|
||||
"x": 163.902196956619,
|
||||
|
|
@ -547,6 +526,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_3-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAIEmbeddings_3-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -575,7 +562,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 524,
|
||||
"height": 523,
|
||||
"id": "openAI_4",
|
||||
"position": {
|
||||
"x": 529.8870809493459,
|
||||
|
|
@ -686,6 +673,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_4-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_4-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -720,11 +715,11 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 703,
|
||||
"height": 603,
|
||||
"id": "pineconeExistingIndex_1",
|
||||
"position": {
|
||||
"x": 539.4840212380209,
|
||||
"y": 452.3690065882661
|
||||
"x": 525.6644489497978,
|
||||
"y": 420.1233379157454
|
||||
},
|
||||
"type": "customNode",
|
||||
"data": {
|
||||
|
|
@ -760,6 +755,7 @@
|
|||
"type": "string",
|
||||
"placeholder": "my-first-namespace",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeExistingIndex_1-input-pineconeNamespace-string"
|
||||
},
|
||||
{
|
||||
|
|
@ -769,6 +765,16 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "pineconeExistingIndex_1-input-pineconeMetadataFilter-json"
|
||||
},
|
||||
{
|
||||
"label": "Top K",
|
||||
"name": "topK",
|
||||
"description": "Number of top results to fetch. Default to 4",
|
||||
"placeholder": "4",
|
||||
"type": "number",
|
||||
"additionalParams": true,
|
||||
"optional": true,
|
||||
"id": "pineconeExistingIndex_1-input-topK-number"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [
|
||||
|
|
@ -808,78 +814,24 @@
|
|||
}
|
||||
],
|
||||
"outputs": {
|
||||
"output": "vectorStore"
|
||||
"output": "retriever"
|
||||
},
|
||||
"selected": false
|
||||
},
|
||||
"selected": false,
|
||||
"dragging": false,
|
||||
"positionAbsolute": {
|
||||
"x": 539.4840212380209,
|
||||
"y": 452.3690065882661
|
||||
"x": 525.6644489497978,
|
||||
"y": 420.1233379157454
|
||||
}
|
||||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 280,
|
||||
"id": "vectorDBQAChain_3",
|
||||
"position": {
|
||||
"x": 896.3238465010572,
|
||||
"y": 173.57643605877104
|
||||
},
|
||||
"type": "customNode",
|
||||
"data": {
|
||||
"id": "vectorDBQAChain_3",
|
||||
"label": "VectorDB QA Chain",
|
||||
"name": "vectorDBQAChain",
|
||||
"type": "VectorDBQAChain",
|
||||
"baseClasses": ["VectorDBQAChain", "BaseChain", "BaseLangChain"],
|
||||
"category": "Chains",
|
||||
"description": "QA chain for vector databases",
|
||||
"inputParams": [],
|
||||
"inputAnchors": [
|
||||
{
|
||||
"label": "Language Model",
|
||||
"name": "model",
|
||||
"type": "BaseLanguageModel",
|
||||
"id": "vectorDBQAChain_3-input-model-BaseLanguageModel"
|
||||
},
|
||||
{
|
||||
"label": "Vector Store",
|
||||
"name": "vectorStore",
|
||||
"type": "VectorStore",
|
||||
"id": "vectorDBQAChain_3-input-vectorStore-VectorStore"
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"model": "{{openAI_4.data.instance}}",
|
||||
"vectorStore": "{{pineconeExistingIndex_1.data.instance}}"
|
||||
},
|
||||
"outputAnchors": [
|
||||
{
|
||||
"id": "vectorDBQAChain_3-output-vectorDBQAChain-VectorDBQAChain|BaseChain|BaseLangChain",
|
||||
"name": "vectorDBQAChain",
|
||||
"label": "VectorDBQAChain",
|
||||
"type": "VectorDBQAChain | BaseChain | BaseLangChain"
|
||||
}
|
||||
],
|
||||
"outputs": {},
|
||||
"selected": false
|
||||
},
|
||||
"positionAbsolute": {
|
||||
"x": 896.3238465010572,
|
||||
"y": 173.57643605877104
|
||||
},
|
||||
"selected": false,
|
||||
"dragging": false
|
||||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 602,
|
||||
"height": 601,
|
||||
"id": "chainTool_3",
|
||||
"position": {
|
||||
"x": 1260.8044270644157,
|
||||
"y": -244.7000095631508
|
||||
"x": 1267.7142132085273,
|
||||
"y": -85.7749282485849
|
||||
},
|
||||
"type": "customNode",
|
||||
"data": {
|
||||
|
|
@ -926,7 +878,7 @@
|
|||
"name": "state-of-union-qa",
|
||||
"description": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.",
|
||||
"returnDirect": "",
|
||||
"baseChain": "{{vectorDBQAChain_3.data.instance}}"
|
||||
"baseChain": "{{retrievalQAChain_1.data.instance}}"
|
||||
},
|
||||
"outputAnchors": [
|
||||
{
|
||||
|
|
@ -942,13 +894,13 @@
|
|||
"selected": false,
|
||||
"dragging": false,
|
||||
"positionAbsolute": {
|
||||
"x": 1260.8044270644157,
|
||||
"y": -244.7000095631508
|
||||
"x": 1267.7142132085273,
|
||||
"y": -85.7749282485849
|
||||
}
|
||||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 524,
|
||||
"height": 523,
|
||||
"id": "openAI_5",
|
||||
"position": {
|
||||
"x": 1683.95439713088,
|
||||
|
|
@ -1059,6 +1011,14 @@
|
|||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_5-input-timeout-number"
|
||||
},
|
||||
{
|
||||
"label": "BasePath",
|
||||
"name": "basepath",
|
||||
"type": "string",
|
||||
"optional": true,
|
||||
"additionalParams": true,
|
||||
"id": "openAI_5-input-basepath-string"
|
||||
}
|
||||
],
|
||||
"inputAnchors": [],
|
||||
|
|
@ -1093,7 +1053,7 @@
|
|||
},
|
||||
{
|
||||
"width": 300,
|
||||
"height": 280,
|
||||
"height": 279,
|
||||
"id": "mrklAgentLLM_0",
|
||||
"position": {
|
||||
"x": 2061.891333395338,
|
||||
|
|
@@ -1150,6 +1110,114 @@
"y": -140.0694021759809
},
"dragging": false
},
{
"width": 300,
"height": 279,
"id": "retrievalQAChain_0",
"position": {
"x": 898.1253096948574,
"y": -859.1174013418433
},
"type": "customNode",
"data": {
"id": "retrievalQAChain_0",
"label": "Retrieval QA Chain",
"name": "retrievalQAChain",
"type": "RetrievalQAChain",
"baseClasses": ["RetrievalQAChain", "BaseChain", "BaseLangChain"],
"category": "Chains",
"description": "QA chain to answer a question based on the retrieved documents",
"inputParams": [],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "retrievalQAChain_0-input-model-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
}
],
"inputs": {
"model": "{{openAI_3.data.instance}}",
"vectorStoreRetriever": "{{chromaExistingIndex_1.data.instance}}"
},
"outputAnchors": [
{
"id": "retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain",
"name": "retrievalQAChain",
"label": "RetrievalQAChain",
"type": "RetrievalQAChain | BaseChain | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 898.1253096948574,
"y": -859.1174013418433
},
"dragging": false
},
{
"width": 300,
"height": 279,
"id": "retrievalQAChain_1",
"position": {
"x": 895.4349543765911,
"y": 166.60331503487222
},
"type": "customNode",
"data": {
"id": "retrievalQAChain_1",
"label": "Retrieval QA Chain",
"name": "retrievalQAChain",
"type": "RetrievalQAChain",
"baseClasses": ["RetrievalQAChain", "BaseChain", "BaseLangChain"],
"category": "Chains",
"description": "QA chain to answer a question based on the retrieved documents",
"inputParams": [],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "retrievalQAChain_1-input-model-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever"
}
],
"inputs": {
"model": "{{openAI_4.data.instance}}",
"vectorStoreRetriever": "{{pineconeExistingIndex_1.data.instance}}"
},
"outputAnchors": [
{
"id": "retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain",
"name": "retrievalQAChain",
"label": "RetrievalQAChain",
"type": "RetrievalQAChain | BaseChain | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 895.4349543765911,
"y": 166.60331503487222
},
"dragging": false
}
],
"edges": [
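The hunk above adds two Retrieval QA Chain nodes (retrievalQAChain_0 against the Chroma index, retrievalQAChain_1 against the Pinecone index), each taking a Language Model and a Vector Store Retriever as inputs. A minimal TypeScript sketch of what such a node amounts to at runtime, assuming LangChain's RetrievalQAChain and a retriever capped at top K documents; the collection name, query text, and default k of 4 are illustrative, not taken from this PR:

import { OpenAI } from 'langchain/llms/openai'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { Chroma } from 'langchain/vectorstores/chroma'
import { RetrievalQAChain } from 'langchain/chains'

// Sketch only: mirrors the wiring of the retrievalQAChain_0 node above.
// Requires an OPENAI_API_KEY and a running Chroma instance.
const main = async () => {
    const model = new OpenAI({ temperature: 0 })
    const vectorStore = await Chroma.fromExistingCollection(new OpenAIEmbeddings(), {
        collectionName: 'state-of-the-union' // assumed collection name
    })
    const topK = 4 // top K documents returned by the retriever (assumed default)
    const retriever = vectorStore.asRetriever(topK)
    const chain = RetrievalQAChain.fromLLM(model, retriever)
    const res = await chain.call({ query: 'What did the president say about the economy?' })
    console.log(res.text)
}
main()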
@@ -1164,50 +1232,6 @@
"label": ""
}
},
{
"source": "chromaExistingIndex_1",
"sourceHandle": "chromaExistingIndex_1-output-vectorStore-Chroma|VectorStore",
"target": "vectorDBQAChain_2",
"targetHandle": "vectorDBQAChain_2-input-vectorStore-VectorStore",
"type": "buttonedge",
"id": "chromaExistingIndex_1-chromaExistingIndex_1-output-vectorStore-Chroma|VectorStore-vectorDBQAChain_2-vectorDBQAChain_2-input-vectorStore-VectorStore",
"data": {
"label": ""
}
},
{
"source": "openAI_3",
"sourceHandle": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain",
"target": "vectorDBQAChain_2",
"targetHandle": "vectorDBQAChain_2-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_3-openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-vectorDBQAChain_2-vectorDBQAChain_2-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "vectorDBQAChain_2",
"sourceHandle": "vectorDBQAChain_2-output-vectorDBQAChain-VectorDBQAChain|BaseChain|BaseLangChain",
"target": "chainTool_2",
"targetHandle": "chainTool_2-input-baseChain-BaseChain",
"type": "buttonedge",
"id": "vectorDBQAChain_2-vectorDBQAChain_2-output-vectorDBQAChain-VectorDBQAChain|BaseChain|BaseLangChain-chainTool_2-chainTool_2-input-baseChain-BaseChain",
"data": {
"label": ""
}
},
{
"source": "openAI_4",
"sourceHandle": "openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain",
"target": "vectorDBQAChain_3",
"targetHandle": "vectorDBQAChain_3-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_4-openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-vectorDBQAChain_3-vectorDBQAChain_3-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "openAIEmbeddings_3",
"sourceHandle": "openAIEmbeddings_3-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
@@ -1219,28 +1243,6 @@
"label": ""
}
},
{
"source": "vectorDBQAChain_3",
"sourceHandle": "vectorDBQAChain_3-output-vectorDBQAChain-VectorDBQAChain|BaseChain|BaseLangChain",
"target": "chainTool_3",
"targetHandle": "chainTool_3-input-baseChain-BaseChain",
"type": "buttonedge",
"id": "vectorDBQAChain_3-vectorDBQAChain_3-output-vectorDBQAChain-VectorDBQAChain|BaseChain|BaseLangChain-chainTool_3-chainTool_3-input-baseChain-BaseChain",
"data": {
"label": ""
}
},
{
"source": "pineconeExistingIndex_1",
"sourceHandle": "pineconeExistingIndex_1-output-vectorStore-Pinecone|VectorStore",
"target": "vectorDBQAChain_3",
"targetHandle": "vectorDBQAChain_3-input-vectorStore-VectorStore",
"type": "buttonedge",
"id": "pineconeExistingIndex_1-pineconeExistingIndex_1-output-vectorStore-Pinecone|VectorStore-vectorDBQAChain_3-vectorDBQAChain_3-input-vectorStore-VectorStore",
"data": {
"label": ""
}
},
{
"source": "serpAPI_0",
"sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain",
@@ -1295,6 +1297,72 @@
"data": {
"label": ""
}
},
{
"source": "openAI_3",
"sourceHandle": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain",
"target": "retrievalQAChain_0",
"targetHandle": "retrievalQAChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_3-openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-retrievalQAChain_0-retrievalQAChain_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "chromaExistingIndex_1",
"sourceHandle": "chromaExistingIndex_1-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever",
"target": "retrievalQAChain_0",
"targetHandle": "retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "chromaExistingIndex_1-chromaExistingIndex_1-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever-retrievalQAChain_0-retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}
},
{
"source": "retrievalQAChain_0",
"sourceHandle": "retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain",
"target": "chainTool_2",
"targetHandle": "chainTool_2-input-baseChain-BaseChain",
"type": "buttonedge",
"id": "retrievalQAChain_0-retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain-chainTool_2-chainTool_2-input-baseChain-BaseChain",
"data": {
"label": ""
}
},
{
"source": "openAI_4",
"sourceHandle": "openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain",
"target": "retrievalQAChain_1",
"targetHandle": "retrievalQAChain_1-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_4-openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-retrievalQAChain_1-retrievalQAChain_1-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "pineconeExistingIndex_1",
"sourceHandle": "pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever",
"target": "retrievalQAChain_1",
"targetHandle": "retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "pineconeExistingIndex_1-pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-retrievalQAChain_1-retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}
},
{
"source": "retrievalQAChain_1",
"sourceHandle": "retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain",
"target": "chainTool_3",
"targetHandle": "chainTool_3-input-baseChain-BaseChain",
"type": "buttonedge",
"id": "retrievalQAChain_1-retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain-chainTool_3-chainTool_3-input-baseChain-BaseChain",
"data": {
"label": ""
}
}
]
}
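With the new edges above, the chain tool nodes (chainTool_2 and chainTool_3) now receive the Retrieval QA chains instead of the removed VectorDBQA chains. A rough sketch of that wiring, assuming LangChain's ChainTool and agent helper; the second tool's name and description are placeholders, since only "state-of-union-qa" appears in the flow JSON above:

import { OpenAI } from 'langchain/llms/openai'
import { ChainTool } from 'langchain/tools'
import { initializeAgentExecutorWithOptions } from 'langchain/agents'
import type { BaseChain } from 'langchain/chains'

// Sketch only: wraps two QA chains as tools for an MRKL-style agent,
// as the chainTool_2 / chainTool_3 nodes do with the Retrieval QA chains.
async function buildAgent(stateOfUnionQA: BaseChain, pineconeQA: BaseChain) {
    const tools = [
        new ChainTool({
            name: 'state-of-union-qa',
            description: 'State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.',
            chain: stateOfUnionQA
        }),
        new ChainTool({
            name: 'pinecone-docs-qa', // placeholder name for the second tool
            description: 'useful for answering questions about the documents stored in the Pinecone index.',
            chain: pineconeQA
        })
    ]
    const model = new OpenAI({ temperature: 0 })
    return initializeAgentExecutorWithOptions(tools, model, { agentType: 'zero-shot-react-description' })
}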
@@ -114,6 +114,14 @@
"optional": true,
"additionalParams": true,
"id": "openAI_2-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAI_2-input-basepath-string"
}
],
"inputAnchors": [],

@@ -461,6 +469,14 @@
"optional": true,
"additionalParams": true,
"id": "openAI_3-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAI_3-input-basepath-string"
}
],
"inputAnchors": [],
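The hunks above and the ones that follow add the same optional Timeout and BasePath inputs to the OpenAI, ChatOpenAI and OpenAIEmbeddings nodes across the marketplace flows. A sketch of how such inputs are commonly forwarded to LangChain's OpenAI wrapper; the variable names and values below are assumptions about typical usage, not the exact Flowise code:

import { OpenAI } from 'langchain/llms/openai'

// Sketch only: the constructor takes model options first and an optional
// OpenAI SDK configuration second, which is where a custom base path
// (e.g. a proxy endpoint) can be supplied.
const timeout = 30000 // ms, from the node's Timeout input (assumed value)
const basepath = 'https://api.openai.com/v1' // from the node's BasePath input (assumed value)

const llm = new OpenAI(
    { modelName: 'text-davinci-003', temperature: 0.7, timeout },
    basepath ? { basePath: basepath } : undefined
)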
@@ -114,6 +114,14 @@
"optional": true,
"additionalParams": true,
"id": "openAI_1-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAI_1-input-basepath-string"
}
],
"inputAnchors": [],
@@ -102,6 +102,14 @@
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-basepath-string"
}
],
"inputAnchors": [],
@@ -114,6 +114,14 @@
"optional": true,
"additionalParams": true,
"id": "openAI_1-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAI_1-input-basepath-string"
}
],
"inputAnchors": [],
@@ -172,6 +172,14 @@
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_1-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_1-input-basepath-string"
}
],
"inputAnchors": [],
@@ -102,6 +102,14 @@
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-basepath-string"
}
],
"inputAnchors": [],

@@ -235,6 +243,14 @@
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-basepath-string"
}
],
"inputAnchors": [],

@@ -362,6 +378,14 @@
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_1-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_1-input-basepath-string"
}
],
"inputAnchors": [],
@@ -159,6 +159,14 @@
"optional": true,
"additionalParams": true,
"id": "openAI_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAI_0-input-basepath-string"
}
],
"inputAnchors": [],