Revert model var to string; refactor for the case without a key and just override if one is set

Keith Kacsh 2024-01-08 17:53:18 -07:00 committed by Ilango
parent d9b75cdf8e
commit 06201e7cf0
3 changed files with 6 additions and 14 deletions


@@ -141,7 +141,6 @@ Flowise support different environment variables to configure your instance. You
 | DATABASE_SSL | Database connection overssl (When DATABASE_TYPE is postgre) | Boolean | false |
 | SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` |
 | FLOWISE_SECRETKEY_OVERWRITE | Encryption key to be used instead of the key stored in SECRETKEY_PATH | String | |
-| LOCALAI_CHAT_MODELS | JSON-encoded string representing an array of chat models for LocalAI. Each object in the array should have a 'label' and 'name' property. | String | '[]' (Empty Array) |

 You can also specify the env variables when using `npx`. For example:
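(The example itself falls outside this hunk; per the Flowise README it is along the lines of `npx flowise start --PORT=3000 --DEBUG=true`.)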


@@ -33,9 +33,6 @@ class ChatLocalAI_ChatModels implements INode {
             credentialNames: ['LocalAIApi'],
             optional: true
         }
-        const modelOptions = JSON.parse(process.env.LOCALAI_CHAT_MODELS || '[]');
         this.inputs = [
             {
                 label: 'Cache',
@@ -52,10 +49,8 @@ class ChatLocalAI_ChatModels implements INode {
             {
                 label: 'Model Name',
                 name: 'modelName',
-                type: 'options',
-                options: modelOptions,
-                default: modelOptions.length > 0 ? modelOptions[0].name : '',
-                optional: true
+                type: 'string',
+                placeholder: 'gpt4all-lora-quantized.bin'
             },
             {
                 label: 'Temperature',
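Net effect of the hunk above: the env-driven dropdown becomes a free-form string input. Read together with the unchanged context lines, the resulting field is, as a sketch:

            {
                label: 'Model Name',
                name: 'modelName',
                type: 'string',
                placeholder: 'gpt4all-lora-quantized.bin'
            },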
@@ -99,22 +94,22 @@ class ChatLocalAI_ChatModels implements INode {
         const topP = nodeData.inputs?.topP as string
         const timeout = nodeData.inputs?.timeout as string
         const basePath = nodeData.inputs?.basePath as string
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
-        const openAIApiKey = getCredentialParam('LocalAIApiKey', credentialData, nodeData)
+        const localAIApiKey = getCredentialParam('LocalAIApiKey', credentialData, nodeData)
         const cache = nodeData.inputs?.cache as BaseCache

-        const obj: Partial<OpenAIChatInput> & BaseLLMParams & { openAIApiKey?: string } = {
+        const obj: Partial<OpenAIChatInput> & BaseLLMParams & { localAIApiKey?: string } = {
             temperature: parseFloat(temperature),
             modelName,
-            openAIApiKey
+            openAIApiKey: 'sk-'
         }

         if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
         if (topP) obj.topP = parseFloat(topP)
         if (timeout) obj.timeout = parseInt(timeout, 10)
         if (cache) obj.cache = cache
+        if (localAIApiKey) obj.openAIApiKey = localAIApiKey

         const model = new OpenAIChat(obj, { basePath })
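This hunk is the refactor the commit title describes: the object is always built with the placeholder key 'sk-' (presumably so the OpenAI-compatible client accepts a configuration without a real key, since LocalAI can run unauthenticated), and a real key from an attached LocalAI credential overrides it afterwards. A minimal standalone sketch of that pattern, using the names from the diff:

        // Always start with a dummy key; LocalAI does not require one,
        // but the OpenAI client expects the field to be set.
        const obj: Partial<OpenAIChatInput> & BaseLLMParams = {
            temperature: parseFloat(temperature),
            modelName,
            openAIApiKey: 'sk-'
        }
        // Override only when the user attached a LocalAI credential with a key.
        if (localAIApiKey) obj.openAIApiKey = localAIApiKey
        const model = new OpenAIChat(obj, { basePath })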


@@ -26,5 +26,3 @@ PORT=3000
 # LANGCHAIN_ENDPOINT=https://api.smith.langchain.com
 # LANGCHAIN_API_KEY=your_api_key
 # LANGCHAIN_PROJECT=your_project
-
-# LOCALAI_CHAT_MODELS='[{"label": "model1", "name": "model1"}, {"label": "model2", "name": "model2"}]'
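With LOCALAI_CHAT_MODELS gone from the example env file, the model list is no longer configured globally; per the ChatLocalAI diff above, the model name is now typed directly into the node's Model Name field (e.g. gpt4all-lora-quantized.bin).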