Add an Ollama baseUrl in chatflow configuration for Follow-up prompts (#4169)
This commit is contained in:
parent c2b830f279
commit 2b9a1ae316
```diff
@@ -436,6 +436,7 @@ export type FollowUpPromptProviderConfig = {
     [key in FollowUpPromptProvider]: {
         credentialId: string
         modelName: string
+        baseUrl: string
         prompt: string
         temperature: string
     }
```
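The hunk above extends the per-provider config type with a `baseUrl` field. For illustration, a minimal sketch of a config object that satisfies the updated shape; `OllamaFollowUpConfig` is a hypothetical local alias and every value is made up, not taken from this commit:

```ts
// Hypothetical alias mirroring the per-provider shape in the diff above;
// in the real type this shape is keyed by FollowUpPromptProvider.
type OllamaFollowUpConfig = {
    credentialId: string
    modelName: string
    baseUrl: string // the field this commit adds
    prompt: string
    temperature: string
}

// Illustrative values only; a local Ollama instance needs no credential.
const exampleConfig: OllamaFollowUpConfig = {
    credentialId: '',
    modelName: 'llama3.1',
    baseUrl: 'http://127.0.0.1:11434',
    prompt: 'Given the conversation so far, suggest three follow-up questions.',
    temperature: '0.7' // typed as string, matching the existing fields
}
```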
```diff
@@ -8,7 +8,7 @@ import { z } from 'zod'
 import { PromptTemplate } from '@langchain/core/prompts'
 import { StructuredOutputParser } from '@langchain/core/output_parsers'
 import { ChatGroq } from '@langchain/groq'
-import ollama from 'ollama'
+import { Ollama } from 'ollama'
 
 const FollowUpPromptType = z
     .object({
```
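This import swap is what makes the base URL configurable: the `ollama` package's default export is a shared client bound to the default local host, while the named `Ollama` class accepts a `host` option per instance. A short sketch of the difference; the remote address is an example value:

```ts
import ollama, { Ollama } from 'ollama'

// Default export: a shared client targeting http://127.0.0.1:11434.
// await ollama.chat({ model: 'llama3.1', messages: [...] })

// Named class: the host is configurable per instance, which is what the
// new baseUrl setting requires.
const remoteClient = new Ollama({ host: 'http://192.168.1.50:11434' })
```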
```diff
@@ -122,7 +122,11 @@ export const generateFollowUpPrompts = async (
             return structuredResponse
         }
         case FollowUpPromptProvider.OLLAMA: {
-            const response = await ollama.chat({
+            const ollamaClient = new Ollama({
+                host: providerConfig.baseUrl || 'http://127.0.0.1:11434'
+            })
+
+            const response = await ollamaClient.chat({
                 model: providerConfig.modelName,
                 messages: [
                     {
```
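A self-contained sketch of the pattern this hunk introduces, runnable against any reachable Ollama instance; the model name, prompt, and the `OLLAMA_BASE_URL` environment variable are assumptions for the example, not something the commit itself reads:

```ts
import { Ollama } from 'ollama'

async function main() {
    // Fall back to the default host when no base URL is supplied,
    // mirroring the `providerConfig.baseUrl || ...` fallback in the diff.
    const ollamaClient = new Ollama({
        host: process.env.OLLAMA_BASE_URL || 'http://127.0.0.1:11434'
    })

    const response = await ollamaClient.chat({
        model: 'llama3.1', // any model already pulled into the instance
        messages: [{ role: 'user', content: 'Suggest one follow-up question about embeddings.' }]
    })

    console.log(response.message.content)
}

main().catch(console.error)
```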
```diff
@@ -269,6 +269,14 @@ const followUpPromptsOptions = {
         name: FollowUpPromptProviders.OLLAMA,
         icon: ollamaIcon,
         inputs: [
+            {
+                label: 'Base URL',
+                name: 'baseUrl',
+                type: 'string',
+                placeholder: 'http://127.0.0.1:11434',
+                description: 'Base URL of your Ollama instance',
+                default: 'http://127.0.0.1:11434'
+            },
             {
                 label: 'Model Name',
                 name: 'modelName',
```
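Design note: the UI default mirrors the server-side fallback (`providerConfig.baseUrl || 'http://127.0.0.1:11434'`), so a chatflow saved with this field left empty still resolves to the local instance; only users pointing at a remote or containerized Ollama need to change it.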