Merge pull request #1495 from kkacsh321/localai_auth

FEATURE: LocalAI Credential, Optional LocalAI Auth, Model list env var
Henry Heng 2024-01-14 12:14:07 +00:00 committed by GitHub
commit 5239b2707f
3 changed files with 54 additions and 5 deletions
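
In short: a new LocalAIApi credential stores a single password-type field (localAIApiKey), both the ChatLocalAI chat model node and the LocalAIEmbedding embeddings node expose it as an optional 'Connect Credential' input, and when a credential is connected its key is resolved in init() and passed to the OpenAI-compatible client as openAIApiKey, which the client sends as a standard Authorization bearer header to the LocalAI endpoint configured in Base Path. Without a credential the nodes behave exactly as before.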


@@ -0,0 +1,23 @@
+import { INodeParams, INodeCredential } from '../src/Interface'
+
+class LocalAIApi implements INodeCredential {
+    label: string
+    name: string
+    version: number
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'LocalAI API'
+        this.name = 'localAIApi'
+        this.version = 1.0
+        this.inputs = [
+            {
+                label: 'LocalAI Api Key',
+                name: 'localAIApiKey',
+                type: 'password'
+            }
+        ]
+    }
+}
+
+module.exports = { credClass: LocalAIApi }
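
The field name declared here ('localAIApiKey') is the key the two nodes below look up via the getCredentialData/getCredentialParam helpers they import from src/utils. A rough illustration of that coupling, not the actual Flowise helpers (which also handle decryption and node-level overrides):

// Illustration only: the decrypted credential is a plain object keyed by the
// input names declared above, so the lookup name must match 'localAIApiKey'.
const credentialData = { localAIApiKey: 'my-local-ai-key' } // example value
const localAIApiKey = credentialData['localAIApiKey'] ?? '' // roughly what getCredentialParam returns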


@@ -1,5 +1,5 @@
-import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses } from '../../../src/utils'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { OpenAIChat } from 'langchain/llms/openai'
 import { OpenAIChatInput } from 'langchain/chat_models/openai'
 import { BaseCache } from 'langchain/schema'
@@ -14,6 +14,7 @@ class ChatLocalAI_ChatModels implements INode {
     category: string
     description: string
     baseClasses: string[]
+    credential: INodeParams
     inputs: INodeParams[]

     constructor() {
@@ -25,6 +26,13 @@ class ChatLocalAI_ChatModels implements INode {
         this.category = 'Chat Models'
         this.description = 'Use local LLMs like llama.cpp, gpt4all using LocalAI'
         this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(OpenAIChat)]
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['localAIApi'],
+            optional: true
+        }
         this.inputs = [
             {
                 label: 'Cache',
@@ -79,13 +87,16 @@ class ChatLocalAI_ChatModels implements INode {
         ]
     }

-    async init(nodeData: INodeData): Promise<any> {
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
         const temperature = nodeData.inputs?.temperature as string
         const modelName = nodeData.inputs?.modelName as string
         const maxTokens = nodeData.inputs?.maxTokens as string
         const topP = nodeData.inputs?.topP as string
         const timeout = nodeData.inputs?.timeout as string
         const basePath = nodeData.inputs?.basePath as string
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const localAIApiKey = getCredentialParam('localAIApiKey', credentialData, nodeData)
         const cache = nodeData.inputs?.cache as BaseCache

         const obj: Partial<OpenAIChatInput> & BaseLLMParams & { openAIApiKey?: string } = {
@@ -98,6 +109,7 @@ class ChatLocalAI_ChatModels implements INode {
         if (topP) obj.topP = parseFloat(topP)
         if (timeout) obj.timeout = parseInt(timeout, 10)
         if (cache) obj.cache = cache
+        if (localAIApiKey) obj.openAIApiKey = localAIApiKey

         const model = new OpenAIChat(obj, { basePath })
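
Because the key only rides through the OpenAI-compatible client, a secured LocalAI instance can be sanity-checked with a plain request before connecting the credential to this node. A minimal sketch, assuming a LocalAI server at http://localhost:8080/v1 with API-key auth enabled (the URL and env var names are placeholders, not part of this PR):

// If this returns 200 with a model list, the same bearer token will work for the node above.
// LocalAI exposes the OpenAI-compatible GET /v1/models endpoint.
;(async () => {
    const basePath = process.env.LOCALAI_BASE_PATH ?? 'http://localhost:8080/v1'
    const apiKey = process.env.LOCALAI_API_KEY ?? ''
    const res = await fetch(`${basePath}/models`, {
        headers: apiKey ? { Authorization: `Bearer ${apiKey}` } : {}
    })
    console.log(res.status, await res.json())
})()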


@@ -1,4 +1,5 @@
-import { INode, INodeData, INodeParams } from '../../../src/Interface'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getCredentialData, getCredentialParam } from '../../../src/utils'
 import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai'

 class LocalAIEmbedding_Embeddings implements INode {
@@ -10,6 +11,7 @@ class LocalAIEmbedding_Embeddings implements INode {
     category: string
     description: string
     baseClasses: string[]
+    credential: INodeParams
     inputs: INodeParams[]

     constructor() {
@@ -21,6 +23,13 @@ class LocalAIEmbedding_Embeddings implements INode {
         this.category = 'Embeddings'
         this.description = 'Use local embeddings models like llama.cpp'
         this.baseClasses = [this.type, 'Embeddings']
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['localAIApi'],
+            optional: true
+        }
         this.inputs = [
             {
                 label: 'Base Path',
@@ -37,15 +46,20 @@ class LocalAIEmbedding_Embeddings implements INode {
         ]
     }

-    async init(nodeData: INodeData): Promise<any> {
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
         const modelName = nodeData.inputs?.modelName as string
         const basePath = nodeData.inputs?.basePath as string
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const localAIApiKey = getCredentialParam('localAIApiKey', credentialData, nodeData)

         const obj: Partial<OpenAIEmbeddingsParams> & { openAIApiKey?: string } = {
             modelName,
             openAIApiKey: 'sk-'
         }
+        if (localAIApiKey) obj.openAIApiKey = localAIApiKey

         const model = new OpenAIEmbeddings(obj, { basePath })
         return model
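
Taken together, the embeddings node keeps its dummy 'sk-' key when no credential is connected (unauthenticated LocalAI servers ignore it) and substitutes the stored key when one is. A minimal standalone sketch of what init() effectively builds, assuming a LocalAI server at http://localhost:8080/v1 and an example model name (both placeholders):

import { OpenAIEmbeddings } from 'langchain/embeddings/openai'

// Same construction as the node above: optional key, OpenAI-compatible base path.
const embeddings = new OpenAIEmbeddings(
    {
        modelName: 'text-embedding-ada-002', // whichever model your LocalAI instance serves
        openAIApiKey: process.env.LOCALAI_API_KEY ?? 'sk-' // falls back to the dummy key
    },
    { basePath: 'http://localhost:8080/v1' }
)

// const vector = await embeddings.embedQuery('hello from LocalAI')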