add logs to component chains/agents

This commit is contained in:
Henry 2023-07-11 01:53:22 +01:00
parent 2bcc2f90b8
commit eb19c206cf
23 changed files with 414 additions and 199 deletions

View File

@ -90,7 +90,6 @@ class AutoGPT_Agents implements INode {
const res = await executor.run([input])
return res || 'I have completed all my tasks.'
} catch (e) {
console.error(e)
throw new Error(e)
}
}

View File

@ -1,10 +1,11 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class OpenAIFunctionAgent_Agents implements INode {
label: string
@ -93,12 +94,14 @@ class OpenAIFunctionAgent_Agents implements INode {
executor.memory = memory
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
const result = await executor.run(input, [handler])
const result = await executor.run(input, [loggerHandler, handler])
return result
} else {
const result = await executor.run(input)
const result = await executor.run(input, [loggerHandler])
return result
}
}

View File

@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { APIChain } from 'langchain/chains'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
export const API_URL_RAW_PROMPT_TEMPLATE = `You are given the below API Documentation:
{api_docs}
@ -95,12 +96,14 @@ class GETApiChain_Chains implements INode {
const ansPrompt = nodeData.inputs?.ansPrompt as string
const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@ -1,7 +1,8 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { APIChain, createOpenAPIChain } from 'langchain/chains'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class OpenApiChain_Chains implements INode {
label: string
@ -57,12 +58,14 @@ class OpenApiChain_Chains implements INode {
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const chain = await initChain(nodeData)
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
import { API_RESPONSE_RAW_PROMPT_TEMPLATE, API_URL_RAW_PROMPT_TEMPLATE, APIChain } from './postCore'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class POSTApiChain_Chains implements INode {
label: string
@ -84,12 +85,14 @@ class POSTApiChain_Chains implements INode {
const ansPrompt = nodeData.inputs?.ansPrompt as string
const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@ -1,10 +1,11 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConversationChain } from 'langchain/chains'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts'
import { BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { BaseChatModel } from 'langchain/chat_models/base'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
const systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`
@ -90,12 +91,14 @@ class ConversationChain_Chains implements INode {
chain.memory = memory
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
const res = await chain.call({ input }, [handler])
const res = await chain.call({ input }, [loggerHandler, handler])
return res?.response
} else {
const res = await chain.call({ input })
const res = await chain.call({ input }, [loggerHandler])
return res?.response
}
}

View File

@ -1,10 +1,11 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { ConversationalRetrievalQAChain } from 'langchain/chains'
import { AIMessage, BaseRetriever, HumanMessage } from 'langchain/schema'
import { BaseChatMemory, BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { PromptTemplate } from 'langchain/prompts'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
const default_qa_template = `Use the following pieces of context to answer the question at the end, in its original language. If you don't know the answer, just say that you don't know in its original language, don't try to make up an answer.
@ -175,13 +176,15 @@ class ConversationalRetrievalQAChain_Chains implements INode {
chain.memory = memory
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, undefined, returnSourceDocuments)
const res = await chain.call(obj, [handler])
const res = await chain.call(obj, [loggerHandler, handler])
if (res.text && res.sourceDocuments) return res
return res?.text
} else {
const res = await chain.call(obj)
const res = await chain.call(obj, [loggerHandler])
if (res.text && res.sourceDocuments) return res
return res?.text
}

View File

@ -1,7 +1,8 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class LLMChain_Chains implements INode {
label: string
@ -55,7 +56,7 @@ class LLMChain_Chains implements INode {
]
}
async init(nodeData: INodeData, input: string): Promise<any> {
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
const model = nodeData.inputs?.model as BaseLanguageModel
const prompt = nodeData.inputs?.prompt
const output = nodeData.outputs?.output as string
@ -67,7 +68,7 @@ class LLMChain_Chains implements INode {
} else if (output === 'outputPrediction') {
const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false })
const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
const res = await runPrediction(inputVariables, chain, input, promptValues)
const res = await runPrediction(inputVariables, chain, input, promptValues, options)
// eslint-disable-next-line no-console
console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console
@ -81,9 +82,7 @@ class LLMChain_Chains implements INode {
const chain = nodeData.instance as LLMChain
const promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject
const res = options.socketIO
? await runPrediction(inputVariables, chain, input, promptValues, true, options.socketIO, options.socketIOClientId)
: await runPrediction(inputVariables, chain, input, promptValues)
const res = await runPrediction(inputVariables, chain, input, promptValues, options)
// eslint-disable-next-line no-console
console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console
@ -97,17 +96,20 @@ const runPrediction = async (
chain: LLMChain,
input: string,
promptValues: ICommonObject,
isStreaming?: boolean,
socketIO?: any,
socketIOClientId = ''
options: ICommonObject
) => {
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const isStreaming = options.socketIO && options.socketIOClientId
const socketIO = isStreaming ? options.socketIO : undefined
const socketIOClientId = isStreaming ? options.socketIOClientId : ''
if (inputVariables.length === 1) {
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
} else if (inputVariables.length > 1) {
@ -122,15 +124,13 @@ const runPrediction = async (
if (seen.length === 0) {
// All inputVariables have fixed values specified
const options = {
...promptValues
}
const options = { ...promptValues }
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.call(options, [handler])
const res = await chain.call(options, [loggerHandler, handler])
return res?.text
} else {
const res = await chain.call(options)
const res = await chain.call(options, [loggerHandler])
return res?.text
}
} else if (seen.length === 1) {
@ -143,10 +143,10 @@ const runPrediction = async (
}
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.call(options, [handler])
const res = await chain.call(options, [loggerHandler, handler])
return res?.text
} else {
const res = await chain.call(options)
const res = await chain.call(options, [loggerHandler])
return res?.text
}
} else {
@ -155,10 +155,10 @@ const runPrediction = async (
} else {
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@ -1,7 +1,8 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, INode, INodeData, INodeParams, PromptRetriever } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { MultiPromptChain } from 'langchain/chains'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class MultiPromptChain_Chains implements INode {
label: string
@ -63,12 +64,14 @@ class MultiPromptChain_Chains implements INode {
const chain = nodeData.instance as MultiPromptChain
const obj = { input }
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
const res = await chain.call(obj, [handler])
const res = await chain.call(obj, [loggerHandler, handler])
return res?.text
} else {
const res = await chain.call(obj)
const res = await chain.call(obj, [loggerHandler])
return res?.text
}
}

View File

@ -1,7 +1,8 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, INode, INodeData, INodeParams, VectorStoreRetriever } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { MultiRetrievalQAChain } from 'langchain/chains'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class MultiRetrievalQAChain_Chains implements INode {
label: string
@ -71,14 +72,15 @@ class MultiRetrievalQAChain_Chains implements INode {
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean
const obj = { input }
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2, returnSourceDocuments)
const res = await chain.call(obj, [handler])
const res = await chain.call(obj, [loggerHandler, handler])
if (res.text && res.sourceDocuments) return res
return res?.text
} else {
const res = await chain.call(obj)
const res = await chain.call(obj, [loggerHandler])
if (res.text && res.sourceDocuments) return res
return res?.text
}

View File

@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { RetrievalQAChain } from 'langchain/chains'
import { BaseRetriever } from 'langchain/schema'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class RetrievalQAChain_Chains implements INode {
label: string
@ -49,13 +50,14 @@ class RetrievalQAChain_Chains implements INode {
const obj = {
query: input
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
const res = await chain.call(obj, [handler])
const res = await chain.call(obj, [loggerHandler, handler])
return res?.text
} else {
const res = await chain.call(obj)
const res = await chain.call(obj, [loggerHandler])
return res?.text
}
}

View File

@ -1,9 +1,10 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { SqlDatabaseChain, SqlDatabaseChainInput } from 'langchain/chains/sql_db'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { DataSource } from 'typeorm'
import { SqlDatabase } from 'langchain/sql_db'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class SqlDatabaseChain_Chains implements INode {
label: string
@ -65,12 +66,14 @@ class SqlDatabaseChain_Chains implements INode {
const dbFilePath = nodeData.inputs?.dbFilePath
const chain = await getSQLDBChain(databaseType, dbFilePath, model)
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { VectorDBQAChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { VectorStore } from 'langchain/vectorstores'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
class VectorDBQAChain_Chains implements INode {
label: string
@ -53,12 +54,14 @@ class VectorDBQAChain_Chains implements INode {
query: input
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
const res = await chain.call(obj, [handler])
const res = await chain.call(obj, [loggerHandler, handler])
return res?.text
} else {
const res = await chain.call(obj)
const res = await chain.call(obj, [loggerHandler])
return res?.text
}
}

View File

@ -0,0 +1,180 @@
import { BaseTracer, Run, BaseCallbackHandler } from 'langchain/callbacks'
import { AgentAction, ChainValues } from 'langchain/schema'
import { Logger } from 'winston'
import { Server } from 'socket.io'
// A tracer Run augmented with the agent actions recorded so far; the last
// entry is the most recently selected action (see onAgentAction below).
interface AgentRun extends Run {
    actions: AgentAction[]
}
/**
 * Pretty-print a value as JSON (2-space indent), returning `fallback`
 * when serialization throws (e.g. circular references or BigInt values).
 */
function tryJsonStringify(obj: unknown, fallback: string) {
    let serialized: string
    try {
        serialized = JSON.stringify(obj, null, 2)
    } catch {
        serialized = fallback
    }
    return serialized
}
/**
 * Human-readable duration of a run: milliseconds when under one second,
 * otherwise seconds with two decimals. Empty string if the run has not ended.
 */
function elapsed(run: Run): string {
    if (!run.end_time) return ''
    const durationMs = run.end_time - run.start_time
    return durationMs < 1000 ? `${durationMs}ms` : `${(durationMs / 1000).toFixed(2)}s`
}
/**
 * Tracer that writes chain/LLM/tool/agent lifecycle events to a winston
 * logger at the `verbose` level, each prefixed with a breadcrumb trail of
 * the run's ancestry (execution order, run type, and run name).
 */
export class ConsoleCallbackHandler extends BaseTracer {
    name = 'console_callback_handler' as const
    logger: Logger

    constructor(logger: Logger) {
        super()
        this.logger = logger
    }

    // Tracing is log-only; nothing is persisted anywhere.
    protected persistRun(_run: Run) {
        return Promise.resolve()
    }

    // utility methods

    /** Collect the ancestor runs of `run`, nearest parent first. */
    getParents(run: Run) {
        const ancestry: Run[] = []
        for (let cursor = run; cursor.parent_run_id; ) {
            const parent = this.runMap.get(cursor.parent_run_id)
            if (!parent) break
            ancestry.push(parent)
            cursor = parent
        }
        return ancestry
    }

    /** Build a root-to-leaf "order:type:name > ..." path for `run`. */
    getBreadcrumbs(run: Run) {
        const chain = [...this.getParents(run).reverse(), run]
        return chain.map((node) => `${node.execution_order}:${node.run_type}:${node.name}`).join(' > ')
    }

    // logging methods

    onChainStart(run: Run) {
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(`[chain/start] [${path}] Entering Chain run with input: ${tryJsonStringify(run.inputs, '[inputs]')}`)
    }

    onChainEnd(run: Run) {
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[chain/end] [${path}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify(run.outputs, '[outputs]')}`
        )
    }

    onChainError(run: Run) {
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[chain/error] [${path}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify(run.error, '[error]')}`
        )
    }

    onLLMStart(run: Run) {
        const path = this.getBreadcrumbs(run)
        // Trim prompts for readability; any other input shape is logged as-is.
        const inputs = 'prompts' in run.inputs ? { prompts: (run.inputs.prompts as string[]).map((p) => p.trim()) } : run.inputs
        this.logger.verbose(`[llm/start] [${path}] Entering LLM run with input: ${tryJsonStringify(inputs, '[inputs]')}`)
    }

    onLLMEnd(run: Run) {
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[llm/end] [${path}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify(run.outputs, '[response]')}`
        )
    }

    onLLMError(run: Run) {
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[llm/error] [${path}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify(run.error, '[error]')}`
        )
    }

    onToolStart(run: Run) {
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(`[tool/start] [${path}] Entering Tool run with input: "${run.inputs.input?.trim()}"`)
    }

    onToolEnd(run: Run) {
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(`[tool/end] [${path}] [${elapsed(run)}] Exiting Tool run with output: "${run.outputs?.output?.trim()}"`)
    }

    onToolError(run: Run) {
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[tool/error] [${path}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify(run.error, '[error]')}`
        )
    }

    onAgentAction(run: Run) {
        // The latest action is the last element appended by the agent tracer.
        const { actions } = run as AgentRun
        const path = this.getBreadcrumbs(run)
        this.logger.verbose(`[agent/action] [${path}] Agent selected action: ${tryJsonStringify(actions[actions.length - 1], '[action]')}`)
    }
}
/**
* Custom chain handler class
*/
/**
 * Callback handler that streams LLM tokens to a single socket.io client.
 *
 * Emits: 'start' (first token), 'token' (every token), 'end' (LLM finished),
 * and optionally 'sourceDocuments' on chain end. The first `skipK` LLM
 * invocations are not streamed (used to mute intermediate chains).
 */
export class CustomChainHandler extends BaseCallbackHandler {
    name = 'custom_chain_handler'
    isLLMStarted = false
    socketIO: Server
    socketIOClientId = ''
    skipK = 0 // Skip streaming for first K numbers of handleLLMStart
    returnSourceDocuments = false

    constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean) {
        super()
        this.socketIO = socketIO
        this.socketIOClientId = socketIOClientId
        // Only override the defaults when the caller supplied a real value.
        this.skipK = typeof skipK === 'number' ? skipK : this.skipK
        this.returnSourceDocuments = typeof returnSourceDocuments === 'boolean' ? returnSourceDocuments : this.returnSourceDocuments
    }

    /** Emit an event to the client this handler was constructed for. */
    private emitToClient(event: string, ...args: unknown[]) {
        this.socketIO.to(this.socketIOClientId).emit(event, ...args)
    }

    handleLLMStart() {
        // Count down the LLM starts we were asked to mute.
        if (this.skipK > 0) this.skipK -= 1
    }

    handleLLMNewToken(token: string) {
        if (this.skipK !== 0) return
        if (!this.isLLMStarted) {
            this.isLLMStarted = true
            this.emitToClient('start', token)
        }
        this.emitToClient('token', token)
    }

    handleLLMEnd() {
        this.emitToClient('end')
    }

    handleChainEnd(outputs: ChainValues): void | Promise<void> {
        if (!this.returnSourceDocuments) return
        this.emitToClient('sourceDocuments', outputs?.sourceDocuments)
    }
}

View File

@ -3,9 +3,6 @@ import { load } from 'cheerio'
import * as fs from 'fs'
import * as path from 'path'
import { JSDOM } from 'jsdom'
import { BaseCallbackHandler } from 'langchain/callbacks'
import { Server } from 'socket.io'
import { ChainValues } from 'langchain/dist/schema'
export const numberOrExpressionRegex = '^(\\d+\\.?\\d*|{{.*}})$' //return true if string consists only numbers OR expression {{}}
export const notEmptyRegex = '(.|\\s)*\\S(.|\\s)*' //return true if string is not empty or blank
@ -350,50 +347,9 @@ export const getEnvironmentVariable = (name: string): string | undefined => {
}
}
/**
* Custom chain handler class
/*
* List of dependencies allowed to be import in vm2
*/
export class CustomChainHandler extends BaseCallbackHandler {
name = 'custom_chain_handler'
isLLMStarted = false
socketIO: Server
socketIOClientId = ''
skipK = 0 // Skip streaming for first K numbers of handleLLMStart
returnSourceDocuments = false
constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean) {
super()
this.socketIO = socketIO
this.socketIOClientId = socketIOClientId
this.skipK = skipK ?? this.skipK
this.returnSourceDocuments = returnSourceDocuments ?? this.returnSourceDocuments
}
handleLLMStart() {
if (this.skipK > 0) this.skipK -= 1
}
handleLLMNewToken(token: string) {
if (this.skipK === 0) {
if (!this.isLLMStarted) {
this.isLLMStarted = true
this.socketIO.to(this.socketIOClientId).emit('start', token)
}
this.socketIO.to(this.socketIOClientId).emit('token', token)
}
}
handleLLMEnd() {
this.socketIO.to(this.socketIOClientId).emit('end')
}
handleChainEnd(outputs: ChainValues): void | Promise<void> {
if (this.returnSourceDocuments) {
this.socketIO.to(this.socketIOClientId).emit('sourceDocuments', outputs?.sourceDocuments)
}
}
}
export const availableDependencies = [
'@dqbd/tiktoken',
'@getzep/zep-js',

View File

@ -5,4 +5,5 @@ PORT=3000
# DATABASE_PATH=/your_database_path/.flowise
# APIKEY_PATH=/your_api_key_path/.flowise
# LOG_PATH=/your_log_path/logs
# EXECUTION_MODE=child or main
# LOG_LEVEL=debug (error | warn | info | verbose | debug)
# EXECUTION_MODE=main (child | main)

View File

@ -1,5 +1,6 @@
import { ICommonObject } from 'flowise-components'
import { IActiveChatflows, INodeData, IReactFlowNode } from './Interface'
import logger from './utils/logger'
/**
* This pool is to keep track of active chatflow pools
@ -22,6 +23,7 @@ export class ChatflowPool {
inSync: true
}
if (overrideConfig) this.activeChatflows[chatflowid].overrideConfig = overrideConfig
logger.info(`[server]: Chatflow ${chatflowid} added into ChatflowPool`)
}
/**
@ -32,6 +34,7 @@ export class ChatflowPool {
updateInSync(chatflowid: string, inSync: boolean) {
if (Object.prototype.hasOwnProperty.call(this.activeChatflows, chatflowid)) {
this.activeChatflows[chatflowid].inSync = inSync
logger.info(`[server]: Chatflow ${chatflowid} updated inSync=${inSync} in ChatflowPool`)
}
}
@ -42,6 +45,7 @@ export class ChatflowPool {
async remove(chatflowid: string) {
if (Object.prototype.hasOwnProperty.call(this.activeChatflows, chatflowid)) {
delete this.activeChatflows[chatflowid]
logger.info(`[server]: Chatflow ${chatflowid} removed from ChatflowPool`)
}
}
}

View File

@ -5,6 +5,7 @@ import { DataSource } from 'typeorm'
import { ChatFlow } from './entity/ChatFlow'
import { ChatMessage } from './entity/ChatMessage'
import { Tool } from './entity/Tool'
import logger from './utils/logger'
export class ChildProcess {
/**
@ -27,6 +28,7 @@ export class ChildProcess {
await sendToParentProcess('start', '_')
try {
const childAppDataSource = await initDB()
// Create a Queue and add our initial node in it
@ -55,12 +57,17 @@ export class ChildProcess {
const directedGraph = graph
const endingNodeId = getEndingNode(nodeDependencies, directedGraph)
if (!endingNodeId) {
await sendToParentProcess('error', `Ending node must be either a Chain or Agent`)
await sendToParentProcess('error', `Ending node ${endingNodeId} not found`)
return
}
const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data
if (!endingNodeData) {
await sendToParentProcess('error', `Ending node ${endingNodeId} data not found`)
return
}
if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') {
await sendToParentProcess('error', `Ending node must be either a Chain or Agent`)
return
}
@ -82,6 +89,7 @@ export class ChildProcess {
const nonDirectedGraph = constructedObj.graph
const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId)
logger.debug(`[server] [mode:child]: Start building chatflow ${chatflow.id}`)
/*** BFS to traverse from Starting Nodes to Ending Node ***/
const reactFlowNodes = await buildLangchain(
startingNodeIds,
@ -117,9 +125,15 @@ export class ChildProcess {
const nodeModule = await import(nodeInstanceFilePath)
const nodeInstance = new nodeModule.nodeClass()
logger.debug(`[server] [mode:child]: Running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
const result = await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history })
logger.debug(`[server] [mode:child]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
await sendToParentProcess('finish', { result, addToChatFlowPool })
} catch (e: any) {
await sendToParentProcess('error', e.message)
logger.error('[server] [mode:child]: Error:', e)
}
}
}

View File

@ -23,6 +23,7 @@ export default class Start extends Command {
DATABASE_PATH: Flags.string(),
APIKEY_PATH: Flags.string(),
LOG_PATH: Flags.string(),
LOG_LEVEL: Flags.string(),
EXECUTION_MODE: Flags.string()
}
@ -61,6 +62,7 @@ export default class Start extends Command {
if (flags.DATABASE_PATH) process.env.DATABASE_PATH = flags.DATABASE_PATH
if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH
if (flags.LOG_PATH) process.env.LOG_PATH = flags.LOG_PATH
if (flags.LOG_LEVEL) process.env.LOG_LEVEL = flags.LOG_LEVEL
if (flags.EXECUTION_MODE) process.env.EXECUTION_MODE = flags.EXECUTION_MODE
if (flags.DEBUG) process.env.DEBUG = flags.DEBUG

View File

@ -283,10 +283,16 @@ export class App {
const nodes = parsedFlowData.nodes
const edges = parsedFlowData.edges
const { graph, nodeDependencies } = constructGraphs(nodes, edges)
const endingNodeId = getEndingNode(nodeDependencies, graph)
if (!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`)
if (!endingNodeId) return res.status(500).send(`Ending node ${endingNodeId} not found`)
const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data
if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`)
if (!endingNodeData) return res.status(500).send(`Ending node ${endingNodeId} data not found`)
if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') {
return res.status(500).send(`Ending node must be either a Chain or Agent`)
}
const obj = {
isStreaming: isFlowValidForStream(nodes, endingNodeData)
@ -638,7 +644,7 @@ export class App {
})
})
} catch (err) {
logger.error(err)
logger.error('[server] [mode:child]: Error:', err)
}
}
@ -714,9 +720,11 @@ export class App {
if (process.env.EXECUTION_MODE === 'child') {
if (isFlowReusable()) {
nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData
logger.debug(
`[server] [mode:child]: Reuse existing chatflow ${chatflowid} with ending node ${nodeToExecuteData.label} (${nodeToExecuteData.id})`
)
try {
const result = await this.startChildProcess(chatflow, chatId, incomingInput, nodeToExecuteData)
return res.json(result)
} catch (error) {
return res.status(500).send(error)
@ -739,15 +747,22 @@ export class App {
if (isFlowReusable()) {
nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData
isStreamValid = isFlowValidForStream(nodes, nodeToExecuteData)
logger.debug(
`[server]: Reuse existing chatflow ${chatflowid} with ending node ${nodeToExecuteData.label} (${nodeToExecuteData.id})`
)
} else {
/*** Get Ending Node with Directed Graph ***/
const { graph, nodeDependencies } = constructGraphs(nodes, edges)
const directedGraph = graph
const endingNodeId = getEndingNode(nodeDependencies, directedGraph)
if (!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`)
if (!endingNodeId) return res.status(500).send(`Ending node ${endingNodeId} not found`)
const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data
if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`)
if (!endingNodeData) return res.status(500).send(`Ending node ${endingNodeId} data not found`)
if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') {
return res.status(500).send(`Ending node must be either a Chain or Agent`)
}
if (
endingNodeData.outputs &&
@ -768,6 +783,7 @@ export class App {
const nonDirectedGraph = constructedObj.graph
const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId)
logger.debug(`[server]: Start building chatflow ${chatflowid}`)
/*** BFS to traverse from Starting Nodes to Ending Node ***/
const reactFlowNodes = await buildLangchain(
startingNodeIds,
@ -796,17 +812,21 @@ export class App {
const nodeInstance = new nodeModule.nodeClass()
isStreamValid = isStreamValid && !isVectorStoreFaiss(nodeToExecuteData)
logger.debug(`[server]: Running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
const result = isStreamValid
? await nodeInstance.run(nodeToExecuteData, incomingInput.question, {
chatHistory: incomingInput.history,
socketIO,
socketIOClientId: incomingInput.socketIOClientId
socketIOClientId: incomingInput.socketIOClientId,
logger
})
: await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history })
: await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history, logger })
logger.debug(`[server]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
return res.json(result)
}
} catch (e: any) {
logger.error('[server]: Error:', e)
return res.status(500).send(e.message)
}
}

View File

@ -9,12 +9,12 @@ dotenv.config({ path: path.join(__dirname, '..', '..', '.env'), override: true }
const loggingConfig = {
dir: process.env.LOG_PATH ?? path.join(__dirname, '..', '..', '..', '..', 'logs'),
server: {
level: 'info',
level: process.env.LOG_LEVEL ?? 'info',
filename: 'server.log',
errorFilename: 'server-error.log'
},
express: {
level: 'info',
level: process.env.LOG_LEVEL ?? 'info',
format: 'jsonl', // can't be changed currently
filename: 'server-requests.log.jsonl' // should end with .jsonl
}

View File

@ -180,6 +180,9 @@ export const getEndingNode = (nodeDependencies: INodeDependencies, graph: INodeD
* @param {IDepthQueue} depthQueue
* @param {IComponentNodes} componentNodes
* @param {string} question
* @param {string} chatId
* @param {DataSource} appDataSource
* @param {ICommonObject} overrideConfig
*/
export const buildLangchain = async (
startingNodeIds: string[],
@ -222,11 +225,14 @@ export const buildLangchain = async (
if (overrideConfig) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig)
const reactFlowNodeData: INodeData = resolveVariables(flowNodeData, flowNodes, question)
logger.debug(`[server]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`)
flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question, {
chatId,
appDataSource,
databaseEntities
databaseEntities,
logger
})
logger.debug(`[server]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`)
} catch (e: any) {
logger.error(e)
throw new Error(e)

View File

@ -4,7 +4,7 @@ import config from './config' // should be replaced by node-config or similar
import { createLogger, transports, format } from 'winston'
import { NextFunction, Request, Response } from 'express'
const { combine, timestamp, printf } = format
const { combine, timestamp, printf, errors } = format
// expect the log dir be relative to the projects root
const logDir = config.logging.dir
@ -18,9 +18,11 @@ const logger = createLogger({
format: combine(
timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
format.json(),
printf(({ level, message, timestamp }) => {
return `${timestamp} [${level.toUpperCase()}]: ${message}`
})
printf(({ level, message, timestamp, stack }) => {
const text = `${timestamp} [${level.toUpperCase()}]: ${message}`
return stack ? text + '\n' + stack : text
}),
errors({ stack: true })
),
defaultMeta: {
package: 'server'
@ -56,7 +58,7 @@ const logger = createLogger({
*/
export function expressRequestLogger(req: Request, res: Response, next: NextFunction): void {
const fileLogger = createLogger({
format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json()),
format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json(), errors({ stack: true })),
defaultMeta: {
package: 'server',
request: {
@ -71,7 +73,7 @@ export function expressRequestLogger(req: Request, res: Response, next: NextFunc
transports: [
new transports.File({
filename: path.join(logDir, config.logging.express.filename ?? 'server-requests.log.jsonl'),
level: 'debug'
level: config.logging.express.level ?? 'debug'
})
]
})