Add additional OpenAI parameters (max tokens, top P, frequency/presence penalty, timeout, batch size, best-of, strip-new-lines)
This commit is contained in:
parent
15abd1f168
commit
2ac951689f
|
|
@ -1,3 +1,4 @@
|
||||||
|
import { OpenAIChatInput } from 'langchain/llms/openai'
|
||||||
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||||
import { getBaseClasses } from '../../../src/utils'
|
import { getBaseClasses } from '../../../src/utils'
|
||||||
import { ChatOpenAI } from 'langchain/chat_models/openai'
|
import { ChatOpenAI } from 'langchain/chat_models/openai'
|
||||||
|
|
@ -61,6 +62,41 @@ class ChatOpenAI_ChatModels implements INode {
|
||||||
type: 'number',
|
type: 'number',
|
||||||
default: 0.9,
|
default: 0.9,
|
||||||
optional: true
|
optional: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Max Tokens',
|
||||||
|
name: 'maxTokens',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Top Probability',
|
||||||
|
name: 'topP',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Frequency Penalty',
|
||||||
|
name: 'frequencyPenalty',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Presence Penalty',
|
||||||
|
name: 'presencePenalty',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Timeout',
|
||||||
|
name: 'timeout',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
@ -69,12 +105,25 @@ class ChatOpenAI_ChatModels implements INode {
|
||||||
const temperature = nodeData.inputs?.temperature as string
|
const temperature = nodeData.inputs?.temperature as string
|
||||||
const modelName = nodeData.inputs?.modelName as string
|
const modelName = nodeData.inputs?.modelName as string
|
||||||
const openAIApiKey = nodeData.inputs?.openAIApiKey as string
|
const openAIApiKey = nodeData.inputs?.openAIApiKey as string
|
||||||
|
const maxTokens = nodeData.inputs?.maxTokens as string
|
||||||
|
const topP = nodeData.inputs?.topP as string
|
||||||
|
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
|
||||||
|
const presencePenalty = nodeData.inputs?.presencePenalty as string
|
||||||
|
const timeout = nodeData.inputs?.timeout as string
|
||||||
|
|
||||||
const model = new ChatOpenAI({
|
const obj: Partial<OpenAIChatInput> & { openAIApiKey?: string } = {
|
||||||
temperature: parseInt(temperature, 10),
|
temperature: parseInt(temperature, 10),
|
||||||
modelName,
|
modelName,
|
||||||
openAIApiKey
|
openAIApiKey
|
||||||
})
|
}
|
||||||
|
|
||||||
|
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
|
||||||
|
if (topP) obj.topP = parseInt(topP, 10)
|
||||||
|
if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10)
|
||||||
|
if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10)
|
||||||
|
if (timeout) obj.timeout = parseInt(timeout, 10)
|
||||||
|
|
||||||
|
const model = new ChatOpenAI(obj)
|
||||||
return model
|
return model
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||||
import { getBaseClasses } from '../../../src/utils'
|
import { getBaseClasses } from '../../../src/utils'
|
||||||
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
|
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai'
|
||||||
|
|
||||||
class OpenAIEmbedding_Embeddings implements INode {
|
class OpenAIEmbedding_Embeddings implements INode {
|
||||||
label: string
|
label: string
|
||||||
|
|
@ -25,14 +25,46 @@ class OpenAIEmbedding_Embeddings implements INode {
|
||||||
label: 'OpenAI Api Key',
|
label: 'OpenAI Api Key',
|
||||||
name: 'openAIApiKey',
|
name: 'openAIApiKey',
|
||||||
type: 'password'
|
type: 'password'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Strip New Lines',
|
||||||
|
name: 'stripNewLines',
|
||||||
|
type: 'boolean',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Batch Size',
|
||||||
|
name: 'batchSize',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Timeout',
|
||||||
|
name: 'timeout',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
async init(nodeData: INodeData): Promise<any> {
|
async init(nodeData: INodeData): Promise<any> {
|
||||||
const openAIApiKey = nodeData.inputs?.openAIApiKey as string
|
const openAIApiKey = nodeData.inputs?.openAIApiKey as string
|
||||||
|
const stripNewLines = nodeData.inputs?.stripNewLines as boolean
|
||||||
|
const batchSize = nodeData.inputs?.batchSize as string
|
||||||
|
const timeout = nodeData.inputs?.timeout as string
|
||||||
|
|
||||||
const model = new OpenAIEmbeddings({ openAIApiKey })
|
const obj: Partial<OpenAIEmbeddingsParams> & { openAIApiKey?: string } = {
|
||||||
|
openAIApiKey
|
||||||
|
}
|
||||||
|
|
||||||
|
if (stripNewLines) obj.stripNewLines = stripNewLines
|
||||||
|
if (batchSize) obj.batchSize = parseInt(batchSize, 10)
|
||||||
|
if (timeout) obj.timeout = parseInt(timeout, 10)
|
||||||
|
|
||||||
|
const model = new OpenAIEmbeddings(obj)
|
||||||
return model
|
return model
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||||
import { getBaseClasses } from '../../../src/utils'
|
import { getBaseClasses } from '../../../src/utils'
|
||||||
import { OpenAI } from 'langchain/llms/openai'
|
import { OpenAI, OpenAIInput } from 'langchain/llms/openai'
|
||||||
|
|
||||||
class OpenAI_LLMs implements INode {
|
class OpenAI_LLMs implements INode {
|
||||||
label: string
|
label: string
|
||||||
|
|
@ -57,6 +57,55 @@ class OpenAI_LLMs implements INode {
|
||||||
type: 'number',
|
type: 'number',
|
||||||
default: 0.7,
|
default: 0.7,
|
||||||
optional: true
|
optional: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Max Tokens',
|
||||||
|
name: 'maxTokens',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Top Probability',
|
||||||
|
name: 'topP',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Best Of',
|
||||||
|
name: 'bestOf',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Frequency Penalty',
|
||||||
|
name: 'frequencyPenalty',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Presence Penalty',
|
||||||
|
name: 'presencePenalty',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Batch Size',
|
||||||
|
name: 'batchSize',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'Timeout',
|
||||||
|
name: 'timeout',
|
||||||
|
type: 'number',
|
||||||
|
optional: true,
|
||||||
|
additionalParams: true
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
@ -65,12 +114,29 @@ class OpenAI_LLMs implements INode {
|
||||||
const temperature = nodeData.inputs?.temperature as string
|
const temperature = nodeData.inputs?.temperature as string
|
||||||
const modelName = nodeData.inputs?.modelName as string
|
const modelName = nodeData.inputs?.modelName as string
|
||||||
const openAIApiKey = nodeData.inputs?.openAIApiKey as string
|
const openAIApiKey = nodeData.inputs?.openAIApiKey as string
|
||||||
|
const maxTokens = nodeData.inputs?.maxTokens as string
|
||||||
|
const topP = nodeData.inputs?.topP as string
|
||||||
|
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
|
||||||
|
const presencePenalty = nodeData.inputs?.presencePenalty as string
|
||||||
|
const timeout = nodeData.inputs?.timeout as string
|
||||||
|
const batchSize = nodeData.inputs?.batchSize as string
|
||||||
|
const bestOf = nodeData.inputs?.bestOf as string
|
||||||
|
|
||||||
const model = new OpenAI({
|
const obj: Partial<OpenAIInput> & { openAIApiKey?: string } = {
|
||||||
temperature: parseInt(temperature, 10),
|
temperature: parseInt(temperature, 10),
|
||||||
modelName,
|
modelName,
|
||||||
openAIApiKey
|
openAIApiKey
|
||||||
})
|
}
|
||||||
|
|
||||||
|
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
|
||||||
|
if (topP) obj.topP = parseInt(topP, 10)
|
||||||
|
if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10)
|
||||||
|
if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10)
|
||||||
|
if (timeout) obj.timeout = parseInt(timeout, 10)
|
||||||
|
if (batchSize) obj.batchSize = parseInt(batchSize, 10)
|
||||||
|
if (bestOf) obj.bestOf = parseInt(bestOf, 10)
|
||||||
|
|
||||||
|
const model = new OpenAI(obj)
|
||||||
return model
|
return model
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue