Compare commits

...

1 Commit

Author SHA1 Message Date
chungyau97 b782a8eef6 add streaming for Open AI 2023-05-11 20:24:55 +07:00
3 changed files with 19 additions and 1 deletion

View File

@@ -15,7 +15,7 @@ class RetrievalQAChain_Chains implements INode {
inputs: INodeParams[] inputs: INodeParams[]
constructor() { constructor() {
this.label = 'RetrievalQA Chain' this.label = 'Retrieval QA Chain'
this.name = 'retrievalQAChain' this.name = 'retrievalQAChain'
this.type = 'RetrievalQAChain' this.type = 'RetrievalQAChain'
this.icon = 'chain.svg' this.icon = 'chain.svg'

View File

@@ -96,6 +96,13 @@ class ChatOpenAI_ChatModels implements INode {
type: 'number', type: 'number',
optional: true, optional: true,
additionalParams: true additionalParams: true
},
{
label: 'Streaming',
name: 'streaming',
type: 'boolean',
optional: true,
additionalParams: true
} }
] ]
} }
@@ -109,6 +116,7 @@ class ChatOpenAI_ChatModels implements INode {
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
const presencePenalty = nodeData.inputs?.presencePenalty as string const presencePenalty = nodeData.inputs?.presencePenalty as string
const timeout = nodeData.inputs?.timeout as string const timeout = nodeData.inputs?.timeout as string
const streaming = nodeData.inputs?.streaming as boolean
const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = { const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = {
temperature: parseInt(temperature, 10), temperature: parseInt(temperature, 10),
@@ -121,6 +129,7 @@ class ChatOpenAI_ChatModels implements INode {
if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10) if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10)
if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10) if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10)
if (timeout) obj.timeout = parseInt(timeout, 10) if (timeout) obj.timeout = parseInt(timeout, 10)
if (streaming) obj.streaming = streaming
const model = new ChatOpenAI(obj) const model = new ChatOpenAI(obj)
return model return model

View File

@@ -106,6 +106,13 @@ class OpenAI_LLMs implements INode {
type: 'number', type: 'number',
optional: true, optional: true,
additionalParams: true additionalParams: true
},
{
label: 'Streaming',
name: 'streaming',
type: 'boolean',
optional: true,
additionalParams: true
} }
] ]
} }
@@ -121,6 +128,7 @@ class OpenAI_LLMs implements INode {
const timeout = nodeData.inputs?.timeout as string const timeout = nodeData.inputs?.timeout as string
const batchSize = nodeData.inputs?.batchSize as string const batchSize = nodeData.inputs?.batchSize as string
const bestOf = nodeData.inputs?.bestOf as string const bestOf = nodeData.inputs?.bestOf as string
const streaming = nodeData.inputs?.streaming as boolean
const obj: Partial<OpenAIInput> & { openAIApiKey?: string } = { const obj: Partial<OpenAIInput> & { openAIApiKey?: string } = {
temperature: parseInt(temperature, 10), temperature: parseInt(temperature, 10),
@@ -135,6 +143,7 @@ class OpenAI_LLMs implements INode {
if (timeout) obj.timeout = parseInt(timeout, 10) if (timeout) obj.timeout = parseInt(timeout, 10)
if (batchSize) obj.batchSize = parseInt(batchSize, 10) if (batchSize) obj.batchSize = parseInt(batchSize, 10)
if (bestOf) obj.bestOf = parseInt(bestOf, 10) if (bestOf) obj.bestOf = parseInt(bestOf, 10)
if (streaming) obj.streaming = streaming
const model = new OpenAI(obj) const model = new OpenAI(obj)
return model return model