Merge branch 'feature/AzureOpenAIEmbeddings' into feature/Azure

chungyau97 2023-05-12 08:12:34 +07:00
commit f506c07233
2 changed files with 102 additions and 0 deletions

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-brand-azure" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M6 7.5l-4 9.5h4l6 -15z"></path>
<path d="M22 20l-7 -15l-3 7l4 5l-8 3z"></path>
</svg>

@@ -0,0 +1,97 @@
import { AzureOpenAIInput } from 'langchain/chat_models/openai'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai'

class AzureOpenAIEmbedding_Embeddings implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Azure OpenAI Embeddings'
        this.name = 'azureOpenAIEmbeddings'
        this.type = 'AzureOpenAIEmbeddings'
        this.icon = 'Azure.svg'
        this.category = 'Embeddings'
        this.description = 'Azure OpenAI API to generate embeddings for a given text'
        this.baseClasses = [this.type, ...getBaseClasses(OpenAIEmbeddings)]
        this.inputs = [
            {
                label: 'Azure OpenAI Api Key',
                name: 'azureOpenAIApiKey',
                type: 'password'
            },
            {
                label: 'Azure OpenAI Api Instance Name',
                name: 'azureOpenAIApiInstanceName',
                type: 'string',
                placeholder: 'YOUR-INSTANCE-NAME'
            },
            {
                label: 'Azure OpenAI Api Deployment Name',
                name: 'azureOpenAIApiDeploymentName',
                type: 'string',
                placeholder: 'YOUR-DEPLOYMENT-NAME'
            },
            {
                label: 'Azure OpenAI Api Version',
                name: 'azureOpenAIApiVersion',
                type: 'string',
                placeholder: 'YOUR-API-VERSION'
            },
            {
                label: 'Strip New Lines',
                name: 'stripNewLines',
                type: 'boolean',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Batch Size',
                name: 'batchSize',
                type: 'number',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Timeout',
                name: 'timeout',
                type: 'number',
                optional: true,
                additionalParams: true
            }
        ]
    }

    async init(nodeData: INodeData): Promise<any> {
        const azureOpenAIApiKey = nodeData.inputs?.azureOpenAIApiKey as string
        const azureOpenAIApiInstanceName = nodeData.inputs?.azureOpenAIApiInstanceName as string
        const azureOpenAIApiDeploymentName = nodeData.inputs?.azureOpenAIApiDeploymentName as string
        const azureOpenAIApiVersion = nodeData.inputs?.azureOpenAIApiVersion as string
        const stripNewLines = nodeData.inputs?.stripNewLines as boolean
        const batchSize = nodeData.inputs?.batchSize as string
        const timeout = nodeData.inputs?.timeout as string

        // Map the node inputs onto the langchain OpenAIEmbeddings config;
        // the azureOpenAI* fields direct the client at Azure OpenAI.
        const obj: Partial<OpenAIEmbeddingsParams> & Partial<AzureOpenAIInput> = {
            azureOpenAIApiKey,
            azureOpenAIApiInstanceName,
            azureOpenAIApiDeploymentName,
            azureOpenAIApiVersion
        }

        // Only set the optional inputs when provided, so the langchain defaults apply otherwise
        if (stripNewLines) obj.stripNewLines = stripNewLines
        if (batchSize) obj.batchSize = parseInt(batchSize, 10)
        if (timeout) obj.timeout = parseInt(timeout, 10)

        const model = new OpenAIEmbeddings(obj)
        return model
    }
}

module.exports = { nodeClass: AzureOpenAIEmbedding_Embeddings }
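
For context, init() simply forwards these inputs to langchain's OpenAIEmbeddings, which targets the Azure OpenAI endpoint when the azureOpenAI* fields are set. A minimal standalone sketch of the equivalent direct call follows; the credential values and the API version string are placeholders, not values from this commit.

import { OpenAIEmbeddings } from 'langchain/embeddings/openai'

// Placeholder Azure OpenAI resource details; substitute your own values
const embeddings = new OpenAIEmbeddings({
    azureOpenAIApiKey: 'YOUR-API-KEY',
    azureOpenAIApiInstanceName: 'YOUR-INSTANCE-NAME',
    azureOpenAIApiDeploymentName: 'YOUR-DEPLOYMENT-NAME',
    azureOpenAIApiVersion: 'YOUR-API-VERSION'
})

async function main() {
    // embedQuery returns the embedding vector for a single piece of text
    const vector = await embeddings.embedQuery('Hello from Azure OpenAI')
    console.log(vector.length)
}

main().catch(console.error)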