add logs to component chains/agents

Author: Henry
Date: 2023-07-11 01:53:22 +01:00
Parent: 2bcc2f90b8
Commit: eb19c206cf
23 changed files with 414 additions and 199 deletions

View File

@@ -90,7 +90,6 @@ class AutoGPT_Agents implements INode {
const res = await executor.run([input])
return res || 'I have completed all my tasks.'
} catch (e) {
- console.error(e)
throw new Error(e)
}
}

View File

@@ -1,10 +1,11 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { AIMessage, HumanMessage } from 'langchain/schema'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class OpenAIFunctionAgent_Agents implements INode {
label: string

@@ -93,12 +94,14 @@ class OpenAIFunctionAgent_Agents implements INode {
executor.memory = memory
}
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
- const result = await executor.run(input, [handler])
+ const result = await executor.run(input, [loggerHandler, handler])
return result
} else {
- const result = await executor.run(input)
+ const result = await executor.run(input, [loggerHandler])
return result
}
}

View File

@@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { APIChain } from 'langchain/chains'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

export const API_URL_RAW_PROMPT_TEMPLATE = `You are given the below API Documentation:
{api_docs}

@@ -95,12 +96,14 @@ class GETApiChain_Chains implements INode {
const ansPrompt = nodeData.inputs?.ansPrompt as string
const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
- const res = await chain.run(input, [handler])
+ const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
- const res = await chain.run(input)
+ const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@@ -1,7 +1,8 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { APIChain, createOpenAPIChain } from 'langchain/chains'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { ChatOpenAI } from 'langchain/chat_models/openai'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class OpenApiChain_Chains implements INode {
label: string

@@ -57,12 +58,14 @@ class OpenApiChain_Chains implements INode {
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const chain = await initChain(nodeData)
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
- const res = await chain.run(input, [handler])
+ const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
- const res = await chain.run(input)
+ const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
import { API_RESPONSE_RAW_PROMPT_TEMPLATE, API_URL_RAW_PROMPT_TEMPLATE, APIChain } from './postCore'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class POSTApiChain_Chains implements INode {
label: string

@@ -84,12 +85,14 @@ class POSTApiChain_Chains implements INode {
const ansPrompt = nodeData.inputs?.ansPrompt as string
const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
- const res = await chain.run(input, [handler])
+ const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
- const res = await chain.run(input)
+ const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@@ -1,10 +1,11 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConversationChain } from 'langchain/chains'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts'
import { BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { BaseChatModel } from 'langchain/chat_models/base'
import { AIMessage, HumanMessage } from 'langchain/schema'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

const systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`

@@ -90,12 +91,14 @@ class ConversationChain_Chains implements INode {
chain.memory = memory
}
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
- const res = await chain.call({ input }, [handler])
+ const res = await chain.call({ input }, [loggerHandler, handler])
return res?.response
} else {
- const res = await chain.call({ input })
+ const res = await chain.call({ input }, [loggerHandler])
return res?.response
}
}

View File

@@ -1,10 +1,11 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { ConversationalRetrievalQAChain } from 'langchain/chains'
import { AIMessage, BaseRetriever, HumanMessage } from 'langchain/schema'
import { BaseChatMemory, BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { PromptTemplate } from 'langchain/prompts'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

const default_qa_template = `Use the following pieces of context to answer the question at the end, in its original language. If you don't know the answer, just say that you don't know in its original language, don't try to make up an answer.

@@ -175,13 +176,15 @@ class ConversationalRetrievalQAChain_Chains implements INode {
chain.memory = memory
}
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, undefined, returnSourceDocuments)
- const res = await chain.call(obj, [handler])
+ const res = await chain.call(obj, [loggerHandler, handler])
if (res.text && res.sourceDocuments) return res
return res?.text
} else {
- const res = await chain.call(obj)
+ const res = await chain.call(obj, [loggerHandler])
if (res.text && res.sourceDocuments) return res
return res?.text
}

View File

@@ -1,7 +1,8 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class LLMChain_Chains implements INode {
label: string

@@ -55,7 +56,7 @@ class LLMChain_Chains implements INode {
]
}
- async init(nodeData: INodeData, input: string): Promise<any> {
+ async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
const model = nodeData.inputs?.model as BaseLanguageModel
const prompt = nodeData.inputs?.prompt
const output = nodeData.outputs?.output as string

@@ -67,7 +68,7 @@
} else if (output === 'outputPrediction') {
const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false })
const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
- const res = await runPrediction(inputVariables, chain, input, promptValues)
+ const res = await runPrediction(inputVariables, chain, input, promptValues, options)
// eslint-disable-next-line no-console
console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console

@@ -81,9 +82,7 @@
const chain = nodeData.instance as LLMChain
const promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject
- const res = options.socketIO
- ? await runPrediction(inputVariables, chain, input, promptValues, true, options.socketIO, options.socketIOClientId)
- : await runPrediction(inputVariables, chain, input, promptValues)
+ const res = await runPrediction(inputVariables, chain, input, promptValues, options)
// eslint-disable-next-line no-console
console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console

@@ -97,17 +96,20 @@ const runPrediction = async (
chain: LLMChain,
input: string,
promptValues: ICommonObject,
- isStreaming?: boolean,
- socketIO?: any,
- socketIOClientId = ''
+ options: ICommonObject
) => {
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
+ const isStreaming = options.socketIO && options.socketIOClientId
+ const socketIO = isStreaming ? options.socketIO : undefined
+ const socketIOClientId = isStreaming ? options.socketIOClientId : ''
if (inputVariables.length === 1) {
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
- const res = await chain.run(input, [handler])
+ const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
- const res = await chain.run(input)
+ const res = await chain.run(input, [loggerHandler])
return res
}
} else if (inputVariables.length > 1) {

@@ -122,15 +124,13 @@ const runPrediction = async (
if (seen.length === 0) {
// All inputVariables have fixed values specified
- const options = {
- ...promptValues
- }
+ const options = { ...promptValues }
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
- const res = await chain.call(options, [handler])
+ const res = await chain.call(options, [loggerHandler, handler])
return res?.text
} else {
- const res = await chain.call(options)
+ const res = await chain.call(options, [loggerHandler])
return res?.text
}
} else if (seen.length === 1) {

@@ -143,10 +143,10 @@ const runPrediction = async (
}
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
- const res = await chain.call(options, [handler])
+ const res = await chain.call(options, [loggerHandler, handler])
return res?.text
} else {
- const res = await chain.call(options)
+ const res = await chain.call(options, [loggerHandler])
return res?.text
}
} else {

@@ -155,10 +155,10 @@ const runPrediction = async (
} else {
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
- const res = await chain.run(input, [handler])
+ const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
- const res = await chain.run(input)
+ const res = await chain.run(input, [loggerHandler])
return res
}
}
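
The net effect of the runPrediction changes above: instead of receiving isStreaming, socketIO and socketIOClientId as separate arguments, the helper now takes the node's whole options object and derives the streaming state itself, so the init (outputPrediction) path and the run path call it identically. A minimal sketch of the new calling convention, with the surrounding variables in scope as in the diff:

    // streaming is inferred inside runPrediction from options.socketIO && options.socketIOClientId
    const res = await runPrediction(inputVariables, chain, input, promptValues, options)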

View File

@@ -1,7 +1,8 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, INode, INodeData, INodeParams, PromptRetriever } from '../../../src/Interface'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { MultiPromptChain } from 'langchain/chains'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class MultiPromptChain_Chains implements INode {
label: string

@@ -63,12 +64,14 @@ class MultiPromptChain_Chains implements INode {
const chain = nodeData.instance as MultiPromptChain
const obj = { input }
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
- const res = await chain.call(obj, [handler])
+ const res = await chain.call(obj, [loggerHandler, handler])
return res?.text
} else {
- const res = await chain.call(obj)
+ const res = await chain.call(obj, [loggerHandler])
return res?.text
}
}

View File

@@ -1,7 +1,8 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, INode, INodeData, INodeParams, VectorStoreRetriever } from '../../../src/Interface'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { MultiRetrievalQAChain } from 'langchain/chains'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class MultiRetrievalQAChain_Chains implements INode {
label: string

@@ -71,14 +72,15 @@ class MultiRetrievalQAChain_Chains implements INode {
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean
const obj = { input }
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2, returnSourceDocuments)
- const res = await chain.call(obj, [handler])
+ const res = await chain.call(obj, [loggerHandler, handler])
if (res.text && res.sourceDocuments) return res
return res?.text
} else {
- const res = await chain.call(obj)
+ const res = await chain.call(obj, [loggerHandler])
if (res.text && res.sourceDocuments) return res
return res?.text
}

View File

@@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { RetrievalQAChain } from 'langchain/chains'
import { BaseRetriever } from 'langchain/schema'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class RetrievalQAChain_Chains implements INode {
label: string

@@ -49,13 +50,14 @@ class RetrievalQAChain_Chains implements INode {
const obj = {
query: input
}
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
- const res = await chain.call(obj, [handler])
+ const res = await chain.call(obj, [loggerHandler, handler])
return res?.text
} else {
- const res = await chain.call(obj)
+ const res = await chain.call(obj, [loggerHandler])
return res?.text
}
}

View File

@@ -1,9 +1,10 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { SqlDatabaseChain, SqlDatabaseChainInput } from 'langchain/chains/sql_db'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { DataSource } from 'typeorm'
import { SqlDatabase } from 'langchain/sql_db'
import { BaseLanguageModel } from 'langchain/base_language'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class SqlDatabaseChain_Chains implements INode {
label: string

@@ -65,12 +66,14 @@ class SqlDatabaseChain_Chains implements INode {
const dbFilePath = nodeData.inputs?.dbFilePath
const chain = await getSQLDBChain(databaseType, dbFilePath, model)
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
- const res = await chain.run(input, [handler])
+ const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
- const res = await chain.run(input)
+ const res = await chain.run(input, [loggerHandler])
return res
}
}

View File

@@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
- import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
+ import { getBaseClasses } from '../../../src/utils'
import { VectorDBQAChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { VectorStore } from 'langchain/vectorstores'
+ import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class VectorDBQAChain_Chains implements INode {
label: string

@@ -53,12 +54,14 @@ class VectorDBQAChain_Chains implements INode {
query: input
}
+ const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
- const res = await chain.call(obj, [handler])
+ const res = await chain.call(obj, [loggerHandler, handler])
return res?.text
} else {
- const res = await chain.call(obj)
+ const res = await chain.call(obj, [loggerHandler])
return res?.text
}
}

View File

@@ -0,0 +1,180 @@
import { BaseTracer, Run, BaseCallbackHandler } from 'langchain/callbacks'
import { AgentAction, ChainValues } from 'langchain/schema'
import { Logger } from 'winston'
import { Server } from 'socket.io'

interface AgentRun extends Run {
    actions: AgentAction[]
}

function tryJsonStringify(obj: unknown, fallback: string) {
    try {
        return JSON.stringify(obj, null, 2)
    } catch (err) {
        return fallback
    }
}

function elapsed(run: Run): string {
    if (!run.end_time) return ''
    const elapsed = run.end_time - run.start_time
    if (elapsed < 1000) {
        return `${elapsed}ms`
    }
    return `${(elapsed / 1000).toFixed(2)}s`
}

export class ConsoleCallbackHandler extends BaseTracer {
    name = 'console_callback_handler' as const
    logger: Logger

    protected persistRun(_run: Run) {
        return Promise.resolve()
    }

    constructor(logger: Logger) {
        super()
        this.logger = logger
    }

    // utility methods
    getParents(run: Run) {
        const parents: Run[] = []
        let currentRun = run
        while (currentRun.parent_run_id) {
            const parent = this.runMap.get(currentRun.parent_run_id)
            if (parent) {
                parents.push(parent)
                currentRun = parent
            } else {
                break
            }
        }
        return parents
    }

    getBreadcrumbs(run: Run) {
        const parents = this.getParents(run).reverse()
        const string = [...parents, run]
            .map((parent) => {
                const name = `${parent.execution_order}:${parent.run_type}:${parent.name}`
                return name
            })
            .join(' > ')
        return string
    }

    // logging methods
    onChainStart(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(`[chain/start] [${crumbs}] Entering Chain run with input: ${tryJsonStringify(run.inputs, '[inputs]')}`)
    }

    onChainEnd(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[chain/end] [${crumbs}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify(run.outputs, '[outputs]')}`
        )
    }

    onChainError(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[chain/error] [${crumbs}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify(run.error, '[error]')}`
        )
    }

    onLLMStart(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        const inputs = 'prompts' in run.inputs ? { prompts: (run.inputs.prompts as string[]).map((p) => p.trim()) } : run.inputs
        this.logger.verbose(`[llm/start] [${crumbs}] Entering LLM run with input: ${tryJsonStringify(inputs, '[inputs]')}`)
    }

    onLLMEnd(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[llm/end] [${crumbs}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify(run.outputs, '[response]')}`
        )
    }

    onLLMError(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[llm/error] [${crumbs}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify(run.error, '[error]')}`
        )
    }

    onToolStart(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(`[tool/start] [${crumbs}] Entering Tool run with input: "${run.inputs.input?.trim()}"`)
    }

    onToolEnd(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(`[tool/end] [${crumbs}] [${elapsed(run)}] Exiting Tool run with output: "${run.outputs?.output?.trim()}"`)
    }

    onToolError(run: Run) {
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[tool/error] [${crumbs}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify(run.error, '[error]')}`
        )
    }

    onAgentAction(run: Run) {
        const agentRun = run as AgentRun
        const crumbs = this.getBreadcrumbs(run)
        this.logger.verbose(
            `[agent/action] [${crumbs}] Agent selected action: ${tryJsonStringify(
                agentRun.actions[agentRun.actions.length - 1],
                '[action]'
            )}`
        )
    }
}

/**
 * Custom chain handler class
 */
export class CustomChainHandler extends BaseCallbackHandler {
    name = 'custom_chain_handler'
    isLLMStarted = false
    socketIO: Server
    socketIOClientId = ''
    skipK = 0 // Skip streaming for first K numbers of handleLLMStart
    returnSourceDocuments = false

    constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean) {
        super()
        this.socketIO = socketIO
        this.socketIOClientId = socketIOClientId
        this.skipK = skipK ?? this.skipK
        this.returnSourceDocuments = returnSourceDocuments ?? this.returnSourceDocuments
    }

    handleLLMStart() {
        if (this.skipK > 0) this.skipK -= 1
    }

    handleLLMNewToken(token: string) {
        if (this.skipK === 0) {
            if (!this.isLLMStarted) {
                this.isLLMStarted = true
                this.socketIO.to(this.socketIOClientId).emit('start', token)
            }
            this.socketIO.to(this.socketIOClientId).emit('token', token)
        }
    }

    handleLLMEnd() {
        this.socketIO.to(this.socketIOClientId).emit('end')
    }

    handleChainEnd(outputs: ChainValues): void | Promise<void> {
        if (this.returnSourceDocuments) {
            this.socketIO.to(this.socketIOClientId).emit('sourceDocuments', outputs?.sourceDocuments)
        }
    }
}
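
For reference, the chain and agent nodes in this commit all consume these two handlers with the same small pattern: ConsoleCallbackHandler is always attached so every run is traced to the winston logger, and CustomChainHandler is added only when a socket.io client is connected. A minimal sketch, assuming a node's run method receives the options object the server now populates (as in the diffs above):

    import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

    // logging is unconditional; token streaming only when a socket.io client is attached
    const loggerHandler = new ConsoleCallbackHandler(options.logger)
    if (options.socketIO && options.socketIOClientId) {
        const streamHandler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
        return await chain.run(input, [loggerHandler, streamHandler])
    }
    return await chain.run(input, [loggerHandler])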

View File

@@ -3,9 +3,6 @@ import { load } from 'cheerio'
import * as fs from 'fs'
import * as path from 'path'
import { JSDOM } from 'jsdom'
- import { BaseCallbackHandler } from 'langchain/callbacks'
- import { Server } from 'socket.io'
- import { ChainValues } from 'langchain/dist/schema'

export const numberOrExpressionRegex = '^(\\d+\\.?\\d*|{{.*}})$' //return true if string consists only numbers OR expression {{}}
export const notEmptyRegex = '(.|\\s)*\\S(.|\\s)*' //return true if string is not empty or blank

@@ -350,50 +347,9 @@ export const getEnvironmentVariable = (name: string): string | undefined => {
}
}
- /**
- * Custom chain handler class
+ /*
+ * List of dependencies allowed to be import in vm2
 */
- export class CustomChainHandler extends BaseCallbackHandler {
- name = 'custom_chain_handler'
- isLLMStarted = false
- socketIO: Server
- socketIOClientId = ''
- skipK = 0 // Skip streaming for first K numbers of handleLLMStart
- returnSourceDocuments = false
- constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean) {
- super()
- this.socketIO = socketIO
- this.socketIOClientId = socketIOClientId
- this.skipK = skipK ?? this.skipK
- this.returnSourceDocuments = returnSourceDocuments ?? this.returnSourceDocuments
- }
- handleLLMStart() {
- if (this.skipK > 0) this.skipK -= 1
- }
- handleLLMNewToken(token: string) {
- if (this.skipK === 0) {
- if (!this.isLLMStarted) {
- this.isLLMStarted = true
- this.socketIO.to(this.socketIOClientId).emit('start', token)
- }
- this.socketIO.to(this.socketIOClientId).emit('token', token)
- }
- }
- handleLLMEnd() {
- this.socketIO.to(this.socketIOClientId).emit('end')
- }
- handleChainEnd(outputs: ChainValues): void | Promise<void> {
- if (this.returnSourceDocuments) {
- this.socketIO.to(this.socketIOClientId).emit('sourceDocuments', outputs?.sourceDocuments)
- }
- }
- }
export const availableDependencies = [
'@dqbd/tiktoken',
'@getzep/zep-js',

View File

@@ -5,4 +5,5 @@ PORT=3000
# DATABASE_PATH=/your_database_path/.flowise
# APIKEY_PATH=/your_api_key_path/.flowise
# LOG_PATH=/your_log_path/logs
- # EXECUTION_MODE=child or main
+ # LOG_LEVEL=debug (error | warn | info | verbose | debug)
+ # EXECUTION_MODE=main (child | main)
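
With LOG_LEVEL exposed, the chain/LLM/tool traces emitted by ConsoleCallbackHandler (written at winston's verbose level) can be switched on without touching code. An illustrative .env, values being examples only:

    PORT=3000
    # debug also covers verbose, so both the server's debug lines and the handler traces are written
    LOG_LEVEL=debug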

View File

@@ -1,5 +1,6 @@
import { ICommonObject } from 'flowise-components'
import { IActiveChatflows, INodeData, IReactFlowNode } from './Interface'
+ import logger from './utils/logger'

/**
 * This pool is to keep track of active chatflow pools

@@ -22,6 +23,7 @@
inSync: true
}
if (overrideConfig) this.activeChatflows[chatflowid].overrideConfig = overrideConfig
+ logger.info(`[server]: Chatflow ${chatflowid} added into ChatflowPool`)
}
/**

@@ -32,6 +34,7 @@
updateInSync(chatflowid: string, inSync: boolean) {
if (Object.prototype.hasOwnProperty.call(this.activeChatflows, chatflowid)) {
this.activeChatflows[chatflowid].inSync = inSync
+ logger.info(`[server]: Chatflow ${chatflowid} updated inSync=${inSync} in ChatflowPool`)
}
}

@@ -42,6 +45,7 @@
async remove(chatflowid: string) {
if (Object.prototype.hasOwnProperty.call(this.activeChatflows, chatflowid)) {
delete this.activeChatflows[chatflowid]
+ logger.info(`[server]: Chatflow ${chatflowid} removed from ChatflowPool`)
}
}
}

View File

@@ -5,6 +5,7 @@ import { DataSource } from 'typeorm'
import { ChatFlow } from './entity/ChatFlow'
import { ChatMessage } from './entity/ChatMessage'
import { Tool } from './entity/Tool'
+ import logger from './utils/logger'

export class ChildProcess {
/**

@@ -27,6 +28,7 @@
await sendToParentProcess('start', '_')
+ try {
const childAppDataSource = await initDB()
// Create a Queue and add our initial node in it

@@ -55,12 +57,17 @@
const directedGraph = graph
const endingNodeId = getEndingNode(nodeDependencies, directedGraph)
if (!endingNodeId) {
- await sendToParentProcess('error', `Ending node must be either a Chain or Agent`)
+ await sendToParentProcess('error', `Ending node ${endingNodeId} not found`)
return
}
const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data
if (!endingNodeData) {
+ await sendToParentProcess('error', `Ending node ${endingNodeId} data not found`)
+ return
+ }
+ if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') {
await sendToParentProcess('error', `Ending node must be either a Chain or Agent`)
return
}

@@ -82,6 +89,7 @@
const nonDirectedGraph = constructedObj.graph
const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId)
+ logger.debug(`[server] [mode:child]: Start building chatflow ${chatflow.id}`)
/*** BFS to traverse from Starting Nodes to Ending Node ***/
const reactFlowNodes = await buildLangchain(
startingNodeIds,

@@ -117,9 +125,15 @@
const nodeModule = await import(nodeInstanceFilePath)
const nodeInstance = new nodeModule.nodeClass()
+ logger.debug(`[server] [mode:child]: Running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
const result = await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history })
+ logger.debug(`[server] [mode:child]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
await sendToParentProcess('finish', { result, addToChatFlowPool })
+ } catch (e: any) {
+ await sendToParentProcess('error', e.message)
+ logger.error('[server] [mode:child]: Error:', e)
+ }
}
}

View File

@@ -23,6 +23,7 @@ export default class Start extends Command {
DATABASE_PATH: Flags.string(),
APIKEY_PATH: Flags.string(),
LOG_PATH: Flags.string(),
+ LOG_LEVEL: Flags.string(),
EXECUTION_MODE: Flags.string()
}

@@ -61,6 +62,7 @@
if (flags.DATABASE_PATH) process.env.DATABASE_PATH = flags.DATABASE_PATH
if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH
if (flags.LOG_PATH) process.env.LOG_PATH = flags.LOG_PATH
+ if (flags.LOG_LEVEL) process.env.LOG_LEVEL = flags.LOG_LEVEL
if (flags.EXECUTION_MODE) process.env.EXECUTION_MODE = flags.EXECUTION_MODE
if (flags.DEBUG) process.env.DEBUG = flags.DEBUG
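
The same knob is also available per invocation through the new CLI flag; assuming the usual oclif start command, something along these lines:

    npx flowise start --LOG_LEVEL=debug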

View File

@@ -283,10 +283,16 @@
const nodes = parsedFlowData.nodes
const edges = parsedFlowData.edges
const { graph, nodeDependencies } = constructGraphs(nodes, edges)
const endingNodeId = getEndingNode(nodeDependencies, graph)
- if (!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`)
+ if (!endingNodeId) return res.status(500).send(`Ending node ${endingNodeId} not found`)
const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data
- if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`)
+ if (!endingNodeData) return res.status(500).send(`Ending node ${endingNodeId} data not found`)
+ if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') {
+ return res.status(500).send(`Ending node must be either a Chain or Agent`)
+ }
const obj = {
isStreaming: isFlowValidForStream(nodes, endingNodeData)

@@ -638,7 +644,7 @@
})
})
} catch (err) {
- logger.error(err)
+ logger.error('[server] [mode:child]: Error:', err)
}
}

@@ -714,9 +720,11 @@
if (process.env.EXECUTION_MODE === 'child') {
if (isFlowReusable()) {
nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData
+ logger.debug(
+ `[server] [mode:child]: Reuse existing chatflow ${chatflowid} with ending node ${nodeToExecuteData.label} (${nodeToExecuteData.id})`
+ )
try {
const result = await this.startChildProcess(chatflow, chatId, incomingInput, nodeToExecuteData)
return res.json(result)
} catch (error) {
return res.status(500).send(error)

@@ -739,15 +747,22 @@
if (isFlowReusable()) {
nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData
isStreamValid = isFlowValidForStream(nodes, nodeToExecuteData)
+ logger.debug(
+ `[server]: Reuse existing chatflow ${chatflowid} with ending node ${nodeToExecuteData.label} (${nodeToExecuteData.id})`
+ )
} else {
/*** Get Ending Node with Directed Graph ***/
const { graph, nodeDependencies } = constructGraphs(nodes, edges)
const directedGraph = graph
const endingNodeId = getEndingNode(nodeDependencies, directedGraph)
- if (!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`)
+ if (!endingNodeId) return res.status(500).send(`Ending node ${endingNodeId} not found`)
const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data
- if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`)
+ if (!endingNodeData) return res.status(500).send(`Ending node ${endingNodeId} data not found`)
+ if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') {
+ return res.status(500).send(`Ending node must be either a Chain or Agent`)
+ }
if (
endingNodeData.outputs &&

@@ -768,6 +783,7 @@
const nonDirectedGraph = constructedObj.graph
const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId)
+ logger.debug(`[server]: Start building chatflow ${chatflowid}`)
/*** BFS to traverse from Starting Nodes to Ending Node ***/
const reactFlowNodes = await buildLangchain(
startingNodeIds,

@@ -796,17 +812,21 @@
const nodeInstance = new nodeModule.nodeClass()
isStreamValid = isStreamValid && !isVectorStoreFaiss(nodeToExecuteData)
+ logger.debug(`[server]: Running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
const result = isStreamValid
? await nodeInstance.run(nodeToExecuteData, incomingInput.question, {
chatHistory: incomingInput.history,
socketIO,
- socketIOClientId: incomingInput.socketIOClientId
+ socketIOClientId: incomingInput.socketIOClientId,
+ logger
})
- : await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history })
+ : await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history, logger })
+ logger.debug(`[server]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`)
return res.json(result)
}
} catch (e: any) {
+ logger.error('[server]: Error:', e)
return res.status(500).send(e.message)
}
}

View File

@@ -9,12 +9,12 @@ dotenv.config({ path: path.join(__dirname, '..', '..', '.env'), override: true }
const loggingConfig = {
dir: process.env.LOG_PATH ?? path.join(__dirname, '..', '..', '..', '..', 'logs'),
server: {
- level: 'info',
+ level: process.env.LOG_LEVEL ?? 'info',
filename: 'server.log',
errorFilename: 'server-error.log'
},
express: {
- level: 'info',
+ level: process.env.LOG_LEVEL ?? 'info',
format: 'jsonl', // can't be changed currently
filename: 'server-requests.log.jsonl' // should end with .jsonl
}

View File

@@ -180,6 +180,9 @@ export const getEndingNode = (nodeDependencies: INodeDependencies, graph: INodeD
 * @param {IDepthQueue} depthQueue
 * @param {IComponentNodes} componentNodes
 * @param {string} question
+ * @param {string} chatId
+ * @param {DataSource} appDataSource
+ * @param {ICommonObject} overrideConfig
 */
export const buildLangchain = async (
startingNodeIds: string[],

@@ -222,11 +225,14 @@
if (overrideConfig) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig)
const reactFlowNodeData: INodeData = resolveVariables(flowNodeData, flowNodes, question)
+ logger.debug(`[server]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`)
flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question, {
chatId,
appDataSource,
- databaseEntities
+ databaseEntities,
+ logger
})
+ logger.debug(`[server]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`)
} catch (e: any) {
logger.error(e)
throw new Error(e)

View File

@@ -4,7 +4,7 @@ import config from './config' // should be replaced by node-config or similar
import { createLogger, transports, format } from 'winston'
import { NextFunction, Request, Response } from 'express'
- const { combine, timestamp, printf } = format
+ const { combine, timestamp, printf, errors } = format

// expect the log dir be relative to the projects root
const logDir = config.logging.dir

@@ -18,9 +18,11 @@ const logger = createLogger({
format: combine(
timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
format.json(),
- printf(({ level, message, timestamp }) => {
- return `${timestamp} [${level.toUpperCase()}]: ${message}`
- })
+ printf(({ level, message, timestamp, stack }) => {
+ const text = `${timestamp} [${level.toUpperCase()}]: ${message}`
+ return stack ? text + '\n' + stack : text
+ }),
+ errors({ stack: true })
),
defaultMeta: {
package: 'server'

@@ -56,7 +58,7 @@
 */
export function expressRequestLogger(req: Request, res: Response, next: NextFunction): void {
const fileLogger = createLogger({
- format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json()),
+ format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json(), errors({ stack: true })),
defaultMeta: {
package: 'server',
request: {

@@ -71,7 +73,7 @@
transports: [
new transports.File({
filename: path.join(logDir, config.logging.express.filename ?? 'server-requests.log.jsonl'),
- level: 'debug'
+ level: config.logging.express.level ?? 'debug'
})
]
})
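
Given the extended printf template and errors({ stack: true }), log lines keep the `${timestamp} [${LEVEL}]: ${message}` shape, and when an Error carrying a stack reaches the logger the stack should be appended on the following lines. Illustrative output only (values made up):

    2023-07-11 01:53:22 [VERBOSE]: [llm/start] [1:chain:LLMChain > 2:llm:ChatOpenAI] Entering LLM run with input: { ... }
    2023-07-11 01:53:22 [ERROR]: <error message>
    <stack trace>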