add streaming for OpenAI
This commit is contained in:
parent
6b49cfa6f0
commit
b782a8eef6
|
|
@ -15,7 +15,7 @@ class RetrievalQAChain_Chains implements INode {
|
|||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'RetrievalQA Chain'
|
||||
this.label = 'Retrieval QA Chain'
|
||||
this.name = 'retrievalQAChain'
|
||||
this.type = 'RetrievalQAChain'
|
||||
this.icon = 'chain.svg'
|
||||
|
|
|
|||
|
|
@ -96,6 +96,13 @@ class ChatOpenAI_ChatModels implements INode {
|
|||
type: 'number',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Streaming',
|
||||
name: 'streaming',
|
||||
type: 'boolean',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -109,6 +116,7 @@ class ChatOpenAI_ChatModels implements INode {
|
|||
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
|
||||
const presencePenalty = nodeData.inputs?.presencePenalty as string
|
||||
const timeout = nodeData.inputs?.timeout as string
|
||||
const streaming = nodeData.inputs?.streaming as boolean
|
||||
|
||||
const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = {
|
||||
temperature: parseInt(temperature, 10),
|
||||
|
|
@ -121,6 +129,7 @@ class ChatOpenAI_ChatModels implements INode {
|
|||
if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10)
|
||||
if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10)
|
||||
if (timeout) obj.timeout = parseInt(timeout, 10)
|
||||
if (streaming) obj.streaming = streaming
|
||||
|
||||
const model = new ChatOpenAI(obj)
|
||||
return model
|
||||
|
|
|
|||
|
|
@ -106,6 +106,13 @@ class OpenAI_LLMs implements INode {
|
|||
type: 'number',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Streaming',
|
||||
name: 'streaming',
|
||||
type: 'boolean',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -121,6 +128,7 @@ class OpenAI_LLMs implements INode {
|
|||
const timeout = nodeData.inputs?.timeout as string
|
||||
const batchSize = nodeData.inputs?.batchSize as string
|
||||
const bestOf = nodeData.inputs?.bestOf as string
|
||||
const streaming = nodeData.inputs?.streaming as boolean
|
||||
|
||||
const obj: Partial<OpenAIInput> & { openAIApiKey?: string } = {
|
||||
temperature: parseInt(temperature, 10),
|
||||
|
|
@ -135,6 +143,7 @@ class OpenAI_LLMs implements INode {
|
|||
if (timeout) obj.timeout = parseInt(timeout, 10)
|
||||
if (batchSize) obj.batchSize = parseInt(batchSize, 10)
|
||||
if (bestOf) obj.bestOf = parseInt(bestOf, 10)
|
||||
if (streaming) obj.streaming = streaming
|
||||
|
||||
const model = new OpenAI(obj)
|
||||
return model
|
||||
|
|
|
|||
Loading…
Reference in New Issue