Removal of the custom output parsing.

This commit is contained in:
vinodkiran 2023-10-28 09:09:29 +05:30
parent 6159fa57ef
commit 3696c4517a
1 changed file with 24 additions and 18 deletions

View File

@@ -4,7 +4,8 @@ import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language' import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler' import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { BaseOutputParser } from 'langchain/schema/output_parser' import { BaseOutputParser } from 'langchain/schema/output_parser'
import { injectOutputParser, applyOutputParser } from '../../outputparsers/OutputParserHelpers' import { injectOutputParser } from '../../outputparsers/OutputParserHelpers'
import { BaseLLMOutputParser } from 'langchain/schema/output_parser'
class LLMChain_Chains implements INode { class LLMChain_Chains implements INode {
label: string label: string
@@ -28,16 +29,16 @@ class LLMChain_Chains implements INode {
this.description = 'Chain to run queries against LLMs' this.description = 'Chain to run queries against LLMs'
this.baseClasses = [this.type, ...getBaseClasses(LLMChain)] this.baseClasses = [this.type, ...getBaseClasses(LLMChain)]
this.inputs = [ this.inputs = [
{
label: 'Language Model',
name: 'model',
type: 'BaseLanguageModel'
},
{ {
label: 'Prompt', label: 'Prompt',
name: 'prompt', name: 'prompt',
type: 'BasePromptTemplate' type: 'BasePromptTemplate'
}, },
{
label: 'Language Model',
name: 'model',
type: 'BaseLanguageModel'
},
{ {
label: 'Output Parser', label: 'Output Parser',
name: 'outputParser', name: 'outputParser',
@@ -71,12 +72,18 @@ class LLMChain_Chains implements INode {
const prompt = nodeData.inputs?.prompt const prompt = nodeData.inputs?.prompt
const output = nodeData.outputs?.output as string const output = nodeData.outputs?.output as string
const promptValues = prompt.promptValues as ICommonObject const promptValues = prompt.promptValues as ICommonObject
const llmOutputParser = nodeData.inputs?.outputParser as BaseLLMOutputParser<string | object>
if (output === this.name) { if (output === this.name) {
const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false }) const chain = new LLMChain({ llm: model, outputParser: llmOutputParser, prompt, verbose: process.env.DEBUG === 'true' })
return chain return chain
} else if (output === 'outputPrediction') { } else if (output === 'outputPrediction') {
const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false }) const chain = new LLMChain({
llm: model,
outputParser: llmOutputParser,
prompt,
verbose: process.env.DEBUG === 'true'
})
const inputVariables = chain.prompt.inputVariables as string[] // ["product"] const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData)
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
@@ -98,7 +105,7 @@ class LLMChain_Chains implements INode {
let promptValues: ICommonObject | undefined = nodeData.inputs?.prompt.promptValues as ICommonObject let promptValues: ICommonObject | undefined = nodeData.inputs?.prompt.promptValues as ICommonObject
const outputParser = nodeData.inputs?.outputParser as BaseOutputParser const outputParser = nodeData.inputs?.outputParser as BaseOutputParser
promptValues = injectOutputParser(outputParser, chain, promptValues) promptValues = injectOutputParser(outputParser, chain, promptValues)
const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, outputParser) const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData)
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m') console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
@@ -109,12 +116,11 @@ class LLMChain_Chains implements INode {
const runPrediction = async ( const runPrediction = async (
inputVariables: string[], inputVariables: string[],
chain: LLMChain, chain: LLMChain<string | object>,
input: string, input: string,
promptValuesRaw: ICommonObject | undefined, promptValuesRaw: ICommonObject | undefined,
options: ICommonObject, options: ICommonObject,
nodeData: INodeData, nodeData: INodeData
outputParser: BaseOutputParser | undefined = undefined
) => { ) => {
const loggerHandler = new ConsoleCallbackHandler(options.logger) const loggerHandler = new ConsoleCallbackHandler(options.logger)
const callbacks = await additionalCallbacks(nodeData, options) const callbacks = await additionalCallbacks(nodeData, options)
@@ -146,10 +152,10 @@ const runPrediction = async (
if (isStreaming) { if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId) const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) const res = await chain.call(options, [loggerHandler, handler, ...callbacks])
return applyOutputParser(res?.text, outputParser) return res?.text
} else { } else {
const res = await chain.call(options, [loggerHandler, ...callbacks]) const res = await chain.call(options, [loggerHandler, ...callbacks])
return applyOutputParser(res?.text, outputParser) return res?.text
} }
} else if (seen.length === 1) { } else if (seen.length === 1) {
// If one inputVariable is not specify, use input (user's question) as value // If one inputVariable is not specify, use input (user's question) as value
@@ -162,10 +168,10 @@ const runPrediction = async (
if (isStreaming) { if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId) const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) const res = await chain.call(options, [loggerHandler, handler, ...callbacks])
return applyOutputParser(res?.text, outputParser) return res?.text
} else { } else {
const res = await chain.call(options, [loggerHandler, ...callbacks]) const res = await chain.call(options, [loggerHandler, ...callbacks])
return applyOutputParser(res?.text, outputParser) return res?.text
} }
} else { } else {
throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`) throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`)
@@ -174,10 +180,10 @@ const runPrediction = async (
if (isStreaming) { if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId) const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.run(input, [loggerHandler, handler, ...callbacks]) const res = await chain.run(input, [loggerHandler, handler, ...callbacks])
return applyOutputParser(res, outputParser) return res
} else { } else {
const res = await chain.run(input, [loggerHandler, ...callbacks]) const res = await chain.run(input, [loggerHandler, ...callbacks])
return applyOutputParser(res, outputParser) return res
} }
} }
} }