Merge pull request #141 from FlowiseAI/feature/LocalAI
Bugfix/Add BaseChatModel class to LocalAI
commit 2ffd26b147
@@ -20,7 +20,7 @@ class ChatLocalAI_ChatModels implements INode {
         this.icon = 'localai.png'
         this.category = 'Chat Models'
         this.description = 'Use local LLMs like llama.cpp, gpt4all using LocalAI'
-        this.baseClasses = [this.type, ...getBaseClasses(OpenAIChat)]
+        this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(OpenAIChat)]
         this.inputs = [
             {
                 label: 'Base Path',
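For context, the change adds 'BaseChatModel' to the node's advertised base classes so the LocalAI node can be connected wherever a chat model is accepted, not only where the classes inherited from OpenAIChat match. Below is a minimal sketch of how the constructor reads after the fix; only the fields visible in the hunk are taken from the diff, and the label, name, type, input parameter names, and import paths are illustrative assumptions, not the exact Flowise source.

// Sketch only: fields outside the diff hunk are assumed for illustration.
import { OpenAIChat } from 'langchain/llms/openai'            // assumed import path
import { INode, INodeParams } from '../../../src/Interface'   // assumed import path
import { getBaseClasses } from '../../../src/utils'           // assumed import path

class ChatLocalAI_ChatModels implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatLocalAI'   // assumed
        this.name = 'chatLocalAI'    // assumed
        this.type = 'ChatLocalAI'    // assumed
        this.icon = 'localai.png'
        this.category = 'Chat Models'
        this.description = 'Use local LLMs like llama.cpp, gpt4all using LocalAI'
        // The bugfix: list 'BaseChatModel' explicitly in addition to the
        // base classes inherited from OpenAIChat.
        this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(OpenAIChat)]
        this.inputs = [
            {
                label: 'Base Path',
                name: 'basePath',                              // assumed
                type: 'string',                                // assumed
                placeholder: 'http://localhost:8080/v1'        // assumed
            }
        ]
    }
}

module.exports = { nodeClass: ChatLocalAI_ChatModels }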