change flat to flatten from lodash

Henry 2023-06-10 01:34:37 +01:00
parent 66bfd536a1
commit ffebe7c769
10 changed files with 20 additions and 10 deletions
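The commit message does not spell out the motivation, so the following is an illustrative sketch only, not part of the diff: lodash's flatten performs the same one-level flattening as Array.prototype.flat(), but it does not depend on the ES2019 lib/runtime and it returns an empty array for null or undefined input, which matters when an optional input such as nodeData.inputs?.tools is not connected. The tools value and its nested shape below are assumptions made for the example.

import { flatten } from 'lodash'

// Hypothetical nested input: two tool nodes wired into one agent node,
// so the value arrives as an array of arrays at runtime (assumed, not taken from the commit).
const tools = [['calculator'], ['serpapi', 'web-browser']]

// Array.prototype.flat() does the same one-level flattening, but it needs
// "lib": ["ES2019"] (or newer) in tsconfig.json plus a runtime that ships it,
// and it throws if the value is undefined.
console.log(tools.flat()) // ['calculator', 'serpapi', 'web-browser']

// lodash's flatten works on older compilation targets and tolerates
// null/undefined input by returning an empty array.
console.log(flatten(tools)) // ['calculator', 'serpapi', 'web-browser']
console.log(flatten(undefined)) // []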

View File

@@ -3,6 +3,7 @@ import { BaseChatModel } from 'langchain/chat_models/base'
 import { AutoGPT } from 'langchain/experimental/autogpt'
 import { Tool } from 'langchain/tools'
 import { VectorStoreRetriever } from 'langchain/vectorstores/base'
+import { flatten } from 'lodash'

 class AutoGPT_Agents implements INode {
     label: string
@@ -67,7 +68,7 @@ class AutoGPT_Agents implements INode {
         const model = nodeData.inputs?.model as BaseChatModel
         const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as VectorStoreRetriever
         let tools = nodeData.inputs?.tools as Tool[]
-        tools = tools.flat()
+        tools = flatten(tools)
         const aiName = (nodeData.inputs?.aiName as string) || 'AutoGPT'
         const aiRole = (nodeData.inputs?.aiRole as string) || 'Assistant'
         const maxLoop = nodeData.inputs?.maxLoop as string

View File

@@ -5,6 +5,7 @@ import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
 import { getBaseClasses } from '../../../src/utils'
 import { AIChatMessage, HumanChatMessage } from 'langchain/schema'
 import { BaseLanguageModel } from 'langchain/base_language'
+import { flatten } from 'lodash'

 class ConversationalAgent_Agents implements INode {
     label: string
@@ -63,7 +64,7 @@ class ConversationalAgent_Agents implements INode {
     async init(nodeData: INodeData): Promise<any> {
         const model = nodeData.inputs?.model as BaseLanguageModel
         let tools = nodeData.inputs?.tools as Tool[]
-        tools = tools.flat()
+        tools = flatten(tools)
         const memory = nodeData.inputs?.memory as BaseChatMemory
         const humanMessage = nodeData.inputs?.humanMessage as string
         const systemMessage = nodeData.inputs?.systemMessage as string

View File

@@ -3,6 +3,7 @@ import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/age
 import { getBaseClasses } from '../../../src/utils'
 import { Tool } from 'langchain/tools'
 import { BaseLanguageModel } from 'langchain/base_language'
+import { flatten } from 'lodash'

 class MRKLAgentChat_Agents implements INode {
     label: string
@@ -40,7 +41,7 @@ class MRKLAgentChat_Agents implements INode {
     async init(nodeData: INodeData): Promise<any> {
         const model = nodeData.inputs?.model as BaseLanguageModel
         let tools = nodeData.inputs?.tools as Tool[]
-        tools = tools.flat()
+        tools = flatten(tools)
         const executor = await initializeAgentExecutorWithOptions(tools, model, {
             agentType: 'chat-zero-shot-react-description',
             verbose: process.env.DEBUG === 'true' ? true : false

View File

@@ -3,6 +3,7 @@ import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/age
 import { Tool } from 'langchain/tools'
 import { getBaseClasses } from '../../../src/utils'
 import { BaseLanguageModel } from 'langchain/base_language'
+import { flatten } from 'lodash'

 class MRKLAgentLLM_Agents implements INode {
     label: string
@@ -40,7 +41,7 @@ class MRKLAgentLLM_Agents implements INode {
     async init(nodeData: INodeData): Promise<any> {
         const model = nodeData.inputs?.model as BaseLanguageModel
         let tools = nodeData.inputs?.tools as Tool[]
-        tools = tools.flat()
+        tools = flatten(tools)
         const executor = await initializeAgentExecutorWithOptions(tools, model, {
             agentType: 'zero-shot-react-description',
View File

@@ -3,6 +3,7 @@ import { Chroma } from 'langchain/vectorstores/chroma'
 import { Embeddings } from 'langchain/embeddings/base'
 import { Document } from 'langchain/document'
 import { getBaseClasses } from '../../../src/utils'
+import { flatten } from 'lodash'

 class ChromaUpsert_VectorStores implements INode {
     label: string
@@ -68,7 +69,7 @@ class ChromaUpsert_VectorStores implements INode {
         const chromaURL = nodeData.inputs?.chromaURL as string
         const output = nodeData.outputs?.output as string
-        const flattenDocs = docs && docs.length ? docs.flat() : []
+        const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []
         for (let i = 0; i < flattenDocs.length; i += 1) {
             finalDocs.push(new Document(flattenDocs[i]))
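The vector store upsert nodes apply the same swap to their docs input. Below is a small, self-contained sketch of that guard-and-flatten pattern; the shape and content of docs are made up for illustration and are not taken from the commit.

import { flatten } from 'lodash'
import { Document } from 'langchain/document'

// Hypothetical input: one array of page chunks per connected document loader.
const docs = [
    [{ pageContent: 'chunk A', metadata: { source: 'a.txt' } }],
    [{ pageContent: 'chunk B', metadata: { source: 'b.txt' } }]
]

// Same pattern as the upsert nodes above: flatten one level, then wrap each
// plain object in a langchain Document before upserting.
const flattenDocs = docs && docs.length ? flatten(docs) : []
const finalDocs: Document[] = []
for (let i = 0; i < flattenDocs.length; i += 1) {
    finalDocs.push(new Document(flattenDocs[i]))
}

console.log(finalDocs.length) // 2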

View File

@@ -3,6 +3,7 @@ import { Embeddings } from 'langchain/embeddings/base'
 import { Document } from 'langchain/document'
 import { getBaseClasses } from '../../../src/utils'
 import { FaissStore } from 'langchain/vectorstores/faiss'
+import { flatten } from 'lodash'

 class FaissUpsert_VectorStores implements INode {
     label: string
@@ -63,7 +64,7 @@ class FaissUpsert_VectorStores implements INode {
         const output = nodeData.outputs?.output as string
         const basePath = nodeData.inputs?.basePath as string
-        const flattenDocs = docs && docs.length ? docs.flat() : []
+        const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []
         for (let i = 0; i < flattenDocs.length; i += 1) {
             finalDocs.push(new Document(flattenDocs[i]))

View File

@@ -3,6 +3,7 @@ import { MemoryVectorStore } from 'langchain/vectorstores/memory'
 import { Embeddings } from 'langchain/embeddings/base'
 import { Document } from 'langchain/document'
 import { getBaseClasses } from '../../../src/utils'
+import { flatten } from 'lodash'

 class InMemoryVectorStore_VectorStores implements INode {
     label: string
@@ -55,7 +56,7 @@ class InMemoryVectorStore_VectorStores implements INode {
         const embeddings = nodeData.inputs?.embeddings as Embeddings
         const output = nodeData.outputs?.output as string
-        const flattenDocs = docs && docs.length ? docs.flat() : []
+        const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []
         for (let i = 0; i < flattenDocs.length; i += 1) {
             finalDocs.push(new Document(flattenDocs[i]))

View File

@@ -4,6 +4,7 @@ import { PineconeLibArgs, PineconeStore } from 'langchain/vectorstores/pinecone'
 import { Embeddings } from 'langchain/embeddings/base'
 import { Document } from 'langchain/document'
 import { getBaseClasses } from '../../../src/utils'
+import { flatten } from 'lodash'

 class PineconeUpsert_VectorStores implements INode {
     label: string
@@ -90,7 +91,7 @@ class PineconeUpsert_VectorStores implements INode {
         const pineconeIndex = client.Index(index)
-        const flattenDocs = docs && docs.length ? docs.flat() : []
+        const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []
         for (let i = 0; i < flattenDocs.length; i += 1) {
             finalDocs.push(new Document(flattenDocs[i]))

View File

@@ -4,6 +4,7 @@ import { Document } from 'langchain/document'
 import { getBaseClasses } from '../../../src/utils'
 import { SupabaseVectorStore } from 'langchain/vectorstores/supabase'
 import { createClient } from '@supabase/supabase-js'
+import { flatten } from 'lodash'

 class SupabaseUpsert_VectorStores implements INode {
     label: string
@@ -82,7 +83,7 @@ class SupabaseUpsert_VectorStores implements INode {
         const client = createClient(supabaseProjUrl, supabaseApiKey)
-        const flattenDocs = docs && docs.length ? docs.flat() : []
+        const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []
         for (let i = 0; i < flattenDocs.length; i += 1) {
             finalDocs.push(new Document(flattenDocs[i]))

View File

@@ -4,6 +4,7 @@ import { Document } from 'langchain/document'
 import { getBaseClasses } from '../../../src/utils'
 import { WeaviateLibArgs, WeaviateStore } from 'langchain/vectorstores/weaviate'
 import weaviate, { WeaviateClient, ApiKey } from 'weaviate-ts-client'
+import { flatten } from 'lodash'

 class WeaviateUpsert_VectorStores implements INode {
     label: string
@@ -122,7 +123,7 @@ class WeaviateUpsert_VectorStores implements INode {
         const client: WeaviateClient = weaviate.client(clientConfig)
-        const flattenDocs = docs && docs.length ? docs.flat() : []
+        const flattenDocs = docs && docs.length ? flatten(docs) : []
         const finalDocs = []
         for (let i = 0; i < flattenDocs.length; i += 1) {
             finalDocs.push(new Document(flattenDocs[i]))