feature/add-perplexity-node (#4376)

* added perplexity node

* last stable release

* Update ChatPerplexity.ts

* update

* Update ChatPerplexity.ts

* Update ChatPerplexity.ts

* pnpm lint fix

* feat: update @langchain/community from 0.3.24 to 0.3.29

---------

Co-authored-by: Marvin <marvin.chin@regentmarkets.com>
Ong Chung Yau 2025-05-05 14:34:07 +08:00 committed by GitHub
parent b55fe07511
commit df26e8aef9
7 changed files with 36548 additions and 36698 deletions

View File

@@ -0,0 +1,27 @@
import { INodeParams, INodeCredential } from '../src/Interface'
class PerplexityApi implements INodeCredential {
label: string
name: string
version: number
description: string
inputs: INodeParams[]
constructor() {
this.label = 'Perplexity API'
this.name = 'perplexityApi'
this.version = 1.0
this.description =
'Refer to <a target="_blank" href="https://docs.perplexity.ai/docs/getting-started">official guide</a> on how to get API key'
this.inputs = [
{
label: 'Perplexity API Key',
name: 'perplexityApiKey',
type: 'password',
placeholder: '<PERPLEXITY_API_KEY>'
}
]
}
}
module.exports = { credClass: PerplexityApi }

View File

@@ -26,7 +26,7 @@
},
{
"label": "anthropic.claude-3.5-sonnet-20240620-v1:0",
-"name": "anthropic.claude-3.5-sonnet-20240620-v1:0",
+"name": "anthropic.claude-3-5-sonnet-20240620-v1:0",
"description": "(20240620-v1:0) specific version of Claude Sonnet 3.5 - most intelligent model",
"input_cost": 3e-6,
"output_cost": 0.000015
@@ -1074,6 +1074,47 @@
}
]
},
{
"name": "chatPerplexity",
"models": [
{
"label": "sonar",
"name": "sonar",
"input_cost": 1e-6,
"output_cost": 1e-6
},
{
"label": "sonar-pro",
"name": "sonar-pro",
"input_cost": 3e-6,
"output_cost": 1.5e-5
},
{
"label": "sonar-reasoning",
"name": "sonar-reasoning",
"input_cost": 1e-6,
"output_cost": 5e-6
},
{
"label": "sonar-reasoning-pro",
"name": "sonar-reasoning-pro",
"input_cost": 2e-6,
"output_cost": 8e-6
},
{
"label": "sonar-deep-research",
"name": "sonar",
"input_cost": 2e-6,
"output_cost": 8e-6
},
{
"label": "r1-1776",
"name": "r1-1776",
"input_cost": 2e-6,
"output_cost": 8e-6
}
]
},
{
"name": "chatMistralAI",
"models": [

View File

@@ -0,0 +1,237 @@
import { ChatPerplexity as LangchainChatPerplexity, PerplexityChatInput } from '@langchain/community/chat_models/perplexity'
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatPerplexity } from './FlowiseChatPerplexity'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'
class ChatPerplexity_ChatModels implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]
constructor() {
this.label = 'ChatPerplexity'
this.name = 'chatPerplexity'
this.version = 0.1
this.type = 'ChatPerplexity'
this.icon = 'perplexity.svg'
this.category = 'Chat Models'
this.description = 'Wrapper around Perplexity large language models that use the Chat endpoint'
this.baseClasses = [this.type, ...getBaseClasses(LangchainChatPerplexity)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['perplexityApi']
}
this.inputs = [
{
label: 'Cache',
name: 'cache',
type: 'BaseCache',
optional: true
},
{
label: 'Model Name',
name: 'model',
type: 'asyncOptions',
loadMethod: 'listModels',
default: 'sonar'
},
{
label: 'Temperature',
name: 'temperature',
type: 'number',
step: 0.1,
default: 1,
optional: true
},
{
label: 'Max Tokens',
name: 'maxTokens',
type: 'number',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Top P',
name: 'topP',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Top K',
name: 'topK',
type: 'number',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Presence Penalty',
name: 'presencePenalty',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Frequency Penalty',
name: 'frequencyPenalty',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Streaming',
name: 'streaming',
type: 'boolean',
default: true,
optional: true,
additionalParams: true
},
{
label: 'Timeout',
name: 'timeout',
type: 'number',
step: 1,
optional: true,
additionalParams: true
},
// {
// label: 'Search Domain Filter',
// name: 'searchDomainFilter',
// type: 'json',
// optional: true,
// additionalParams: true,
// description: 'Limit citations to URLs from specified domains (e.g., ["example.com", "anotherexample.org"])'
// },
// Currently disabled as output is stored as additional_kwargs
// {
// label: 'Return Images',
// name: 'returnImages',
// type: 'boolean',
// optional: true,
// additionalParams: true,
// description: 'Whether the model should return images (if supported by the model)'
// },
// Currently disabled as output is stored as additional_kwargs
// {
// label: 'Return Related Questions',
// name: 'returnRelatedQuestions',
// type: 'boolean',
// optional: true,
// additionalParams: true,
// description: 'Whether the online model should return related questions'
// },
// {
// label: 'Search Recency Filter',
// name: 'searchRecencyFilter',
// type: 'options',
// options: [
// { label: 'Not Set', name: '' },
// { label: 'Month', name: 'month' },
// { label: 'Week', name: 'week' },
// { label: 'Day', name: 'day' },
// { label: 'Hour', name: 'hour' }
// ],
// default: '',
// optional: true,
// additionalParams: true,
// description: 'Filter search results by time interval (does not apply to images)'
// },
{
label: 'Proxy Url',
name: 'proxyUrl',
type: 'string',
optional: true,
additionalParams: true
}
// LangchainJS currently does not have a web_search_options, search_after_date_filter or search_before_date_filter parameter.
// TODO: pass web_search_options (user_location, search_context_size), search_after_date_filter and search_before_date_filter through a modelKwargs-style parameter once supported.
]
}
//@ts-ignore
loadMethods = {
async listModels(): Promise<INodeOptionsValue[]> {
return await getModels(MODEL_TYPE.CHAT, 'chatPerplexity')
}
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const model = nodeData.inputs?.model as string
const temperature = nodeData.inputs?.temperature as string
const maxTokens = nodeData.inputs?.maxTokens as string
const topP = nodeData.inputs?.topP as string
const topK = nodeData.inputs?.topK as string
const presencePenalty = nodeData.inputs?.presencePenalty as string
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
const streaming = nodeData.inputs?.streaming as boolean
const timeout = nodeData.inputs?.timeout as string
const searchDomainFilterRaw = nodeData.inputs?.searchDomainFilter
const returnImages = nodeData.inputs?.returnImages as boolean
const returnRelatedQuestions = nodeData.inputs?.returnRelatedQuestions as boolean
const searchRecencyFilter = nodeData.inputs?.searchRecencyFilter as string
const proxyUrl = nodeData.inputs?.proxyUrl as string
const cache = nodeData.inputs?.cache as BaseCache
if (nodeData.inputs?.credentialId) {
nodeData.credential = nodeData.inputs?.credentialId
}
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('perplexityApiKey', credentialData, nodeData)
if (!apiKey) {
throw new Error('Perplexity API Key missing from credential')
}
const obj: PerplexityChatInput = {
model,
apiKey,
streaming: streaming ?? true
}
if (temperature) obj.temperature = parseFloat(temperature)
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (topK) obj.topK = parseInt(topK, 10)
if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
if (timeout) obj.timeout = parseInt(timeout, 10)
if (returnImages) obj.returnImages = returnImages
if (returnRelatedQuestions) obj.returnRelatedQuestions = returnRelatedQuestions
if (searchRecencyFilter && searchRecencyFilter !== '') obj.searchRecencyFilter = searchRecencyFilter
if (cache) obj.cache = cache
if (searchDomainFilterRaw) {
try {
obj.searchDomainFilter =
typeof searchDomainFilterRaw === 'object' ? searchDomainFilterRaw : JSON.parse(searchDomainFilterRaw)
} catch (exception) {
throw new Error('Invalid JSON in Search Domain Filter: ' + exception)
}
}
if (proxyUrl) {
console.warn('Proxy configuration for ChatPerplexity might require adjustments to FlowiseChatPerplexity wrapper.')
}
const perplexityModel = new ChatPerplexity(nodeData.id, obj)
return perplexityModel
}
}
module.exports = { nodeClass: ChatPerplexity_ChatModels }
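For reference, a minimal sketch (not part of the commit) of how the Flowise ChatPerplexity wrapper built in init() above could be exercised directly. The node id 'chatPerplexity_0', the PERPLEXITY_API_KEY environment variable, the relative import path and the prompt text are illustrative assumptions; the constructor fields (model, apiKey, streaming, temperature) mirror the PerplexityChatInput object assembled in init().

import { HumanMessage } from '@langchain/core/messages'
import { ChatPerplexity } from './FlowiseChatPerplexity'

async function main() {
    // 'chatPerplexity_0' stands in for the Flowise node id; the API key is read from an
    // environment variable here purely for illustration.
    const chat = new ChatPerplexity('chatPerplexity_0', {
        model: 'sonar',
        apiKey: process.env.PERPLEXITY_API_KEY as string,
        streaming: false,
        temperature: 1
    })
    // invoke() is inherited from the LangChain chat model base class the wrapper extends
    const response = await chat.invoke([new HumanMessage('Summarise what this node does.')])
    console.log(response.content)
}

main()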

View File

@@ -0,0 +1,32 @@
import { ChatPerplexity as LangchainChatPerplexity, type PerplexityChatInput } from '@langchain/community/chat_models/perplexity'
import { IMultiModalOption, IVisionChatModal } from '../../../src'
// Extend the Langchain ChatPerplexity class to include Flowise-specific properties and methods
export class ChatPerplexity extends LangchainChatPerplexity implements IVisionChatModal {
configuredModel: string
configuredMaxToken?: number
multiModalOption: IMultiModalOption
id: string
constructor(id: string, fields: PerplexityChatInput) {
super(fields)
this.id = id
this.configuredModel = fields?.model ?? '' // Use model from fields
this.configuredMaxToken = fields?.maxTokens
}
// Method to revert to the original model configuration
revertToOriginalModel(): void {
this.model = this.configuredModel
this.maxTokens = this.configuredMaxToken
}
// Method to set multimodal options
setMultiModalOption(multiModalOption: IMultiModalOption): void {
this.multiModalOption = multiModalOption
}
setVisionModel(): void {
// pass
}
}

View File

@@ -0,0 +1,8 @@
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 48 48" style="enable-background:new 0 0 48 48;" xml:space="preserve">
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:black;" d="M11.469,4l11.39,10.494v-0.002V4.024h2.217v10.517
L36.518,4v11.965h4.697v17.258h-4.683v10.654L25.077,33.813v10.18h-2.217V33.979L11.482,44V33.224H6.785V15.965h4.685V4z
M21.188,18.155H9.002v12.878h2.477v-4.062L21.188,18.155z M13.699,27.943v11.17l9.16-8.068V19.623L13.699,27.943z M25.141,30.938
V19.612l9.163,8.321v5.291h0.012v5.775L25.141,30.938z M36.532,31.033h2.466V18.155H26.903l9.629,8.725V31.033z M34.301,15.965
V9.038l-7.519,6.927H34.301z M21.205,15.965h-7.519V9.038L21.205,15.965z"/>
</svg>


View File

@@ -42,7 +42,7 @@
"@langchain/aws": "0.1.4",
"@langchain/baidu-qianfan": "^0.1.0",
"@langchain/cohere": "^0.0.7",
-"@langchain/community": "^0.3.24",
+"@langchain/community": "^0.3.29",
"@langchain/core": "0.3.37",
"@langchain/exa": "^0.0.5",
"@langchain/google-genai": "0.2.3",

File diff suppressed because it is too large