Merge branch 'main' into feature/ChatHistory2

This commit is contained in:
chungyau97 2023-10-02 14:25:33 +08:00
commit e22e797428
20 changed files with 267 additions and 19 deletions

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.3.5",
"version": "1.3.6",
"private": true,
"homepage": "https://flowiseai.com",
"workspaces": [

View File

@ -0,0 +1,32 @@
import { INodeParams, INodeCredential } from '../src/Interface'
// Credential definition for the LLMonitor analytics provider.
// Collects the APP ID (secret) plus an optional self-hosted endpoint URL.
class LLMonitorApi implements INodeCredential {
    label: string
    name: string
    version: number
    description: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'LLMonitor API'
        this.name = 'llmonitorApi'
        this.version = 1.0
        this.description = 'Refer to <a target="_blank" href="https://llmonitor.com/docs">official guide</a> to get APP ID'

        // APP ID is a password field so the UI masks it when entered.
        const appIdField: INodeParams = {
            label: 'APP ID',
            name: 'llmonitorAppId',
            type: 'password',
            placeholder: '<LLMonitor_APP_ID>'
        }
        // Endpoint defaults to the hosted LLMonitor instance.
        const endpointField: INodeParams = {
            label: 'Endpoint',
            name: 'llmonitorEndpoint',
            type: 'string',
            default: 'https://app.llmonitor.com'
        }
        this.inputs = [appIdField, endpointField]
    }
}

module.exports = { credClass: LLMonitorApi }

View File

@ -0,0 +1,33 @@
import { INode, INodeParams } from '../../../src/Interface'
// Analytic node that wires the LLMonitor observability provider into a flow.
// It exposes no inputs of its own — only the credential holding the APP ID.
class LLMonitor_Analytic implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs?: INodeParams[]
    credential: INodeParams

    constructor() {
        this.type = 'LLMonitor'
        this.label = 'LLMonitor'
        this.name = 'llmonitor'
        this.version = 1.0
        this.icon = 'llmonitor.png'
        this.category = 'Analytic'
        // Only the node's own type is advertised as a base class.
        this.baseClasses = [this.type]
        this.inputs = []
        // Credential selector restricted to LLMonitor API credentials.
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['llmonitorApi']
        }
    }
}

module.exports = { nodeClass: LLMonitor_Analytic }

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

View File

@ -43,7 +43,7 @@ class SqlDatabaseChain_Chains implements INode {
constructor() {
this.label = 'Sql Database Chain'
this.name = 'sqlDatabaseChain'
this.version = 3.0
this.version = 4.0
this.type = 'SqlDatabaseChain'
this.icon = 'sqlchain.svg'
this.category = 'Chains'
@ -89,7 +89,8 @@ class SqlDatabaseChain_Chains implements INode {
label: 'Include Tables',
name: 'includesTables',
type: 'string',
description: 'Tables to include for queries.',
description: 'Tables to include for queries, seperated by comma. Can only use Include Tables or Ignore Tables',
placeholder: 'table1, table2',
additionalParams: true,
optional: true
},
@ -97,7 +98,8 @@ class SqlDatabaseChain_Chains implements INode {
label: 'Ignore Tables',
name: 'ignoreTables',
type: 'string',
description: 'Tables to ignore for queries.',
description: 'Tables to ignore for queries, seperated by comma. Can only use Ignore Tables or Include Tables',
placeholder: 'table1, table2',
additionalParams: true,
optional: true
},

View File

@ -0,0 +1,46 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { NIBittensorChatModel, BittensorInput } from 'langchain/experimental/chat_models/bittensor'
// Chat-model node wrapping Bittensor subnet 1 (Neural Internet) chat models.
// The only configurable input is an optional system prompt.
class Bittensor_ChatModels implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.type = 'BittensorChat'
        this.label = 'NIBittensorChat'
        this.name = 'NIBittensorChatModel'
        this.version = 1.0
        this.icon = 'logo.png'
        this.category = 'Chat Models'
        this.description = 'Wrapper around Bittensor subnet 1 large language models'
        // Expose the wrapped langchain model's class hierarchy alongside this node's type.
        this.baseClasses = [this.type, ...getBaseClasses(NIBittensorChatModel)]
        this.inputs = [
            {
                label: 'System prompt',
                name: 'system_prompt',
                type: 'string',
                additionalParams: true,
                optional: true
            }
        ]
    }

    // Build the langchain chat model from the node's configured inputs.
    async init(nodeData: INodeData, _: string): Promise<any> {
        const systemPrompt = nodeData.inputs?.system_prompt as string
        const config: Partial<BittensorInput> = { systemPrompt }
        return new NIBittensorChatModel(config)
    }
}

module.exports = { nodeClass: Bittensor_ChatModels }

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

View File

@ -0,0 +1,57 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { NIBittensorLLM, BittensorInput } from 'langchain/experimental/llms/bittensor'
// LLM node wrapping Bittensor subnet 1 (Neural Internet) language models.
// Configurable inputs: an optional system prompt and an optional
// "top responses" count.
class Bittensor_LLMs implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'NIBittensorLLM'
        this.name = 'NIBittensorLLM'
        this.version = 1.0
        this.type = 'Bittensor'
        this.icon = 'logo.png'
        this.category = 'LLMs'
        this.description = 'Wrapper around Bittensor subnet 1 large language models'
        // Expose the wrapped langchain model's class hierarchy alongside this node's type.
        this.baseClasses = [this.type, ...getBaseClasses(NIBittensorLLM)]
        this.inputs = [
            {
                label: 'System prompt',
                name: 'system_prompt',
                type: 'string',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Top Responses',
                name: 'topResponses',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            }
        ]
    }

    // Build the langchain LLM from the node's configured inputs.
    async init(nodeData: INodeData, _: string): Promise<any> {
        const system_prompt = nodeData.inputs?.system_prompt as string
        const topResponses = nodeData.inputs?.topResponses as string
        const obj: Partial<BittensorInput> = {
            systemPrompt: system_prompt
        }
        // Only forward topResponses when the optional input was provided:
        // Number(undefined) is NaN, which would otherwise be handed to the model.
        if (topResponses !== undefined && topResponses !== '') {
            obj.topResponses = Number(topResponses)
        }
        const model = new NIBittensorLLM(obj)
        return model
    }
}

module.exports = { nodeClass: Bittensor_LLMs }

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

View File

@ -30,7 +30,7 @@ export class ChromaExtended extends Chroma {
if (this.chromaApiKey) {
obj.fetchOptions = {
headers: {
'X-Api-Key': this.chromaApiKey
Authorization: `Bearer ${this.chromaApiKey}`
}
}
}

View File

@ -1,6 +1,6 @@
{
"name": "flowise-components",
"version": "1.3.6",
"version": "1.3.7",
"description": "Flowiseai Components",
"main": "dist/src/index",
"types": "dist/src/index.d.ts",
@ -46,6 +46,7 @@
"langfuse-langchain": "^1.0.14-alpha.0",
"langsmith": "^0.0.32",
"linkifyjs": "^4.1.1",
"llmonitor": "^0.5.5",
"mammoth": "^1.5.1",
"moment": "^2.29.3",
"mysql2": "^3.5.1",

View File

@ -4,6 +4,7 @@ import { Logger } from 'winston'
import { Server } from 'socket.io'
import { Client } from 'langsmith'
import { LangChainTracer } from 'langchain/callbacks'
import { LLMonitorHandler } from 'langchain/callbacks/handlers/llmonitor'
import { getCredentialData, getCredentialParam } from './utils'
import { ICommonObject, INodeData } from './Interface'
import CallbackHandler from 'langfuse-langchain'
@ -194,11 +195,11 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO
for (const provider in analytic) {
const providerStatus = analytic[provider].status as boolean
if (providerStatus) {
const credentialId = analytic[provider].credentialId as string
const credentialData = await getCredentialData(credentialId ?? '', options)
if (provider === 'langSmith') {
const credentialId = analytic[provider].credentialId as string
const langSmithProject = analytic[provider].projectName as string
const credentialData = await getCredentialData(credentialId ?? '', options)
const langSmithApiKey = getCredentialParam('langSmithApiKey', credentialData, nodeData)
const langSmithEndpoint = getCredentialParam('langSmithEndpoint', credentialData, nodeData)
@ -214,13 +215,11 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO
})
callbacks.push(tracer)
} else if (provider === 'langFuse') {
const credentialId = analytic[provider].credentialId as string
const flushAt = analytic[provider].flushAt as string
const flushInterval = analytic[provider].flushInterval as string
const requestTimeout = analytic[provider].requestTimeout as string
const release = analytic[provider].release as string
const credentialData = await getCredentialData(credentialId ?? '', options)
const langFuseSecretKey = getCredentialParam('langFuseSecretKey', credentialData, nodeData)
const langFusePublicKey = getCredentialParam('langFusePublicKey', credentialData, nodeData)
const langFuseEndpoint = getCredentialParam('langFuseEndpoint', credentialData, nodeData)
@ -237,6 +236,17 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO
const handler = new CallbackHandler(langFuseOptions)
callbacks.push(handler)
} else if (provider === 'llmonitor') {
const llmonitorAppId = getCredentialParam('llmonitorAppId', credentialData, nodeData)
const llmonitorEndpoint = getCredentialParam('llmonitorEndpoint', credentialData, nodeData)
const llmonitorFields: ICommonObject = {
appId: llmonitorAppId,
apiUrl: llmonitorEndpoint ?? 'https://app.llmonitor.com'
}
const handler = new LLMonitorHandler(llmonitorFields)
callbacks.push(handler)
}
}
}

View File

@ -167,7 +167,7 @@
"data": {
"id": "sqlDatabaseChain_0",
"label": "Sql Database Chain",
"version": 2,
"version": 4,
"name": "sqlDatabaseChain",
"type": "SqlDatabaseChain",
"baseClasses": ["SqlDatabaseChain", "BaseChain", "Runnable"],
@ -206,6 +206,46 @@
"placeholder": "1270.0.0.1:5432/chinook",
"id": "sqlDatabaseChain_0-input-url-string"
},
{
"label": "Include Tables",
"name": "includesTables",
"type": "string",
"description": "Tables to include for queries, seperated by comma. Can only use Include Tables or Ignore Tables",
"placeholder": "table1, table2",
"additionalParams": true,
"optional": true,
"id": "sqlDatabaseChain_0-input-includesTables-string"
},
{
"label": "Ignore Tables",
"name": "ignoreTables",
"type": "string",
"description": "Tables to ignore for queries, seperated by comma. Can only use Ignore Tables or Include Tables",
"placeholder": "table1, table2",
"additionalParams": true,
"optional": true,
"id": "sqlDatabaseChain_0-input-ignoreTables-string"
},
{
"label": "Sample table's rows info",
"name": "sampleRowsInTableInfo",
"type": "number",
"description": "Number of sample row for tables to load for info.",
"placeholder": "3",
"additionalParams": true,
"optional": true,
"id": "sqlDatabaseChain_0-input-sampleRowsInTableInfo-number"
},
{
"label": "Top Keys",
"name": "topK",
"type": "number",
"description": "If you are querying for several rows of a table you can select the maximum number of results you want to get by using the top_k parameter (default is 10). This is useful for avoiding query results that exceed the prompt max length or consume tokens unnecessarily.",
"placeholder": "10",
"additionalParams": true,
"optional": true,
"id": "sqlDatabaseChain_0-input-topK-number"
},
{
"label": "Custom Prompt",
"name": "customPrompt",

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.3.5",
"version": "1.3.6",
"description": "Flowiseai Server",
"main": "dist/index",
"types": "dist/index.d.ts",

View File

@ -27,6 +27,7 @@ export default class Start extends Command {
LOG_LEVEL: Flags.string(),
TOOL_FUNCTION_BUILTIN_DEP: Flags.string(),
TOOL_FUNCTION_EXTERNAL_DEP: Flags.string(),
NUMBER_OF_PROXIES: Flags.string(),
DATABASE_TYPE: Flags.string(),
DATABASE_PATH: Flags.string(),
DATABASE_PORT: Flags.string(),
@ -72,6 +73,7 @@ export default class Start extends Command {
if (flags.PORT) process.env.PORT = flags.PORT
if (flags.DEBUG) process.env.DEBUG = flags.DEBUG
if (flags.NUMBER_OF_PROXIES) process.env.NUMBER_OF_PROXIES = flags.NUMBER_OF_PROXIES
// Authorization
if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME

View File

@ -12,7 +12,7 @@ async function addRateLimiter(id: string, duration: number, limit: number, messa
rateLimiters[id] = rateLimit({
windowMs: duration * 1000,
max: limit,
handler: (req, res) => {
handler: (_, res) => {
res.status(429).send(message)
}
})
@ -33,15 +33,19 @@ export function getRateLimiter(req: Request, res: Response, next: NextFunction)
export async function createRateLimiter(chatFlow: IChatFlow) {
if (!chatFlow.apiConfig) return
const apiConfig: any = JSON.parse(chatFlow.apiConfig)
const apiConfig = JSON.parse(chatFlow.apiConfig)
const rateLimit: { limitDuration: number; limitMax: number; limitMsg: string } = apiConfig.rateLimit
if (!rateLimit) return
const { limitDuration, limitMax, limitMsg } = rateLimit
if (limitMax && limitDuration && limitMsg) await addRateLimiter(chatFlow.id, limitDuration, limitMax, limitMsg)
}
export async function initializeRateLimiter(chatFlowPool: IChatFlow[]) {
await chatFlowPool.map(async (chatFlow) => {
await createRateLimiter(chatFlow)
})
await Promise.all(
chatFlowPool.map(async (chatFlow) => {
await createRateLimiter(chatFlow)
})
)
}

View File

@ -1,6 +1,6 @@
{
"name": "flowise-ui",
"version": "1.3.3",
"version": "1.3.4",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://flowiseai.com",
"author": {

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

View File

@ -31,6 +31,7 @@ import { Input } from 'ui-component/input/Input'
import { StyledButton } from 'ui-component/button/StyledButton'
import langsmithPNG from 'assets/images/langchain.png'
import langfusePNG from 'assets/images/langfuse.png'
import llmonitorPNG from 'assets/images/llmonitor.png'
// store
import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from 'store/actions'
@ -115,6 +116,26 @@ const analyticProviders = [
optional: true
}
]
},
{
label: 'LLMonitor',
name: 'llmonitor',
icon: llmonitorPNG,
url: 'https://llmonitor.com',
inputs: [
{
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['llmonitorApi']
},
{
label: 'On/Off',
name: 'status',
type: 'boolean',
optional: true
}
]
}
]

View File

@ -136,7 +136,7 @@ const Configuration = () => {
<TooltipWithParser
style={{ mb: 1, mt: 2, marginLeft: 10 }}
title={
'Visit <a target="_blank" href="https://docs.flowiseai.com/deployment#rate-limit-setup-guide">Rate Limit Setup Guide</a> to set up Rate Limit correctly in your hosting environment.'
'Visit <a target="_blank" href="https://docs.flowiseai.com/rate-limit">Rate Limit Setup Guide</a> to set up Rate Limit correctly in your hosting environment.'
}
/>
</Typography>