Bugfix/Disable output prediction from llmchain streaming (#2543)
disable output prediction from llmchain streaming
This commit is contained in:
parent
e5f0ca2dd3
commit
e15e6fafdc
|
|
@ -110,7 +110,9 @@ class LLMChain_Chains implements INode {
|
|||
})
|
||||
const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
|
||||
promptValues = injectOutputParser(this.outputParser, chain, promptValues)
|
||||
const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData)
|
||||
// Disable streaming because it's not the final chain
|
||||
const disableStreaming = true
|
||||
const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, disableStreaming)
|
||||
// eslint-disable-next-line no-console
|
||||
console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m')
|
||||
// eslint-disable-next-line no-console
|
||||
|
|
@ -154,12 +156,13 @@ const runPrediction = async (
|
|||
input: string,
|
||||
promptValuesRaw: ICommonObject | undefined,
|
||||
options: ICommonObject,
|
||||
nodeData: INodeData
|
||||
nodeData: INodeData,
|
||||
disableStreaming?: boolean
|
||||
) => {
|
||||
const loggerHandler = new ConsoleCallbackHandler(options.logger)
|
||||
const callbacks = await additionalCallbacks(nodeData, options)
|
||||
|
||||
const isStreaming = options.socketIO && options.socketIOClientId
|
||||
const isStreaming = !disableStreaming && options.socketIO && options.socketIOClientId
|
||||
const socketIO = isStreaming ? options.socketIO : undefined
|
||||
const socketIOClientId = isStreaming ? options.socketIOClientId : ''
|
||||
const moderations = nodeData.inputs?.inputModeration as Moderation[]
|
||||
|
|
|
|||
Loading…
Reference in New Issue