Merge branch 'main' into feature/Credential

# Conflicts:
#	README.md
#	docker/.env.example
#	packages/components/nodes/documentloaders/Notion/NotionDB.ts
#	packages/components/nodes/memory/DynamoDb/DynamoDb.ts
#	packages/components/nodes/memory/MotorheadMemory/MotorheadMemory.ts
#	packages/components/nodes/memory/ZepMemory/ZepMemory.ts
#	packages/components/package.json
#	packages/components/src/utils.ts
#	packages/server/.env.example
#	packages/server/README.md
#	packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json
#	packages/server/src/ChildProcess.ts
#	packages/server/src/DataSource.ts
#	packages/server/src/commands/start.ts
#	packages/server/src/index.ts
#	packages/server/src/utils/index.ts
#	packages/server/src/utils/logger.ts
This commit is contained in:
Henry 2023-07-27 11:26:34 +01:00
commit 61dabbb7da
107 changed files with 4347 additions and 826 deletions

View File

@ -114,9 +114,42 @@ Flowise has 3 different modules in a single mono repository.
11. Commit code and submit Pull Request from forked branch pointing to [Flowise master](https://github.com/FlowiseAI/Flowise/tree/master).
## 🌱 Env Variables
Flowise supports different environment variables to configure your instance. You can specify the following variables in the `.env` file inside the `packages/server` folder. Read [more](https://docs.flowiseai.com/environment-variables)
| Variable | Description | Type | Default |
| -------------------------- | ---------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| PORT | The HTTP port Flowise runs on | Number | 3000 |
| FLOWISE_USERNAME | Username to login | String | |
| FLOWISE_PASSWORD | Password to login | String | |
| DEBUG | Print logs from components | Boolean | |
| LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` |
| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` |
| APIKEY_PATH | Location where api keys are saved | String | `your-path/Flowise/packages/server` |
| EXECUTION_MODE | Whether predictions run in their own process or the main process | Enum String: `child`, `main` | `main` |
| TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Tool Function | String | |
| TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Tool Function | String | |
| OVERRIDE_DATABASE | Override current database with default | Enum String: `true`, `false` | `true` |
| DATABASE_TYPE | Type of database to store the flowise data | Enum String: `sqlite`, `mysql`, `postgres` | `sqlite` |
| DATABASE_PATH | Location where database is saved (When DATABASE_TYPE is sqlite) | String | `your-home-dir/.flowise` |
| DATABASE_HOST | Host URL or IP address (When DATABASE_TYPE is not sqlite) | String | |
| DATABASE_PORT | Database port (When DATABASE_TYPE is not sqlite) | String | |
| DATABASE_USERNAME | Database username (When DATABASE_TYPE is not sqlite) | String | |
| DATABASE_PASSWORD | Database password (When DATABASE_TYPE is not sqlite) | String | |
| DATABASE_NAME | Database name (When DATABASE_TYPE is not sqlite) | String | |
| PASSPHRASE | Passphrase used to create encryption key | String | `MYPASSPHRASE` |
| SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` |
You can also specify the env variables when using `npx`. For example:
```
npx flowise start --PORT=3000 --DEBUG=true
```
## 📖 Contribute to Docs
In-Progress
[Flowise Docs](https://github.com/FlowiseAI/FlowiseDocs)
## 🏷️ Pull Request process

View File

@ -1,12 +1,19 @@
<!-- markdownlint-disable MD030 -->
# Flowise
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.png?raw=true"></a>
# Flowise - Build LLM Apps Easily
[![Release Notes](https://img.shields.io/github/release/FlowiseAI/Flowise)](https://github.com/FlowiseAI/Flowise/releases)
[![Discord](https://img.shields.io/discord/1087698854775881778?label=Discord&logo=discord)](https://discord.gg/jbaHfsRVBW)
[![Twitter Follow](https://img.shields.io/twitter/follow/FlowiseAI?style=social)](https://twitter.com/FlowiseAI)
[![GitHub star chart](https://img.shields.io/github/stars/FlowiseAI/Flowise?style=social)](https://star-history.com/#FlowiseAI/Flowise)
[![GitHub fork](https://img.shields.io/github/forks/FlowiseAI/Flowise?style=social)](https://github.com/FlowiseAI/Flowise/fork)
<h3>Drag & drop UI to build your customized LLM flow</h3>
<a href="https://github.com/FlowiseAI/Flowise">
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true"></a>
Drag & drop UI to build your customized LLM flow
## ⚡Quick Start
Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0
@ -128,27 +135,7 @@ FLOWISE_PASSWORD=1234
## 🌱 Env Variables
Flowise supports different environment variables to configure your instance. You can specify the following variables in the `.env` file inside the `packages/server` folder.
| Variable | Description | Type | Default |
| ---------------- | ---------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| PORT | The HTTP port Flowise runs on | Number | 3000 |
| PASSPHRASE | Passphrase used to create encryption key | String | `MYPASSPHRASE` |
| FLOWISE_USERNAME | Username to login | String |
| FLOWISE_PASSWORD | Password to login | String |
| DEBUG | Print logs from components | Boolean |
| LOG_PATH | Location where log files are stored | String | `your/path/Flowise/logs` |
| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` |
| DATABASE_PATH | Location where database is saved | String | `your/home/dir/.flowise` |
| APIKEY_PATH | Location where api keys are saved | String | `your/path/Flowise/packages/server` |
| SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your/path/Flowise/packages/server` |
| EXECUTION_MODE | Whether predictions run in their own process or the main process | Enum String: `child`, `main` | `main` |
You can also specify the env variables when using `npx`. For example:
```
npx flowise start --PORT=3000 --DEBUG=true
```
Flowise supports different environment variables to configure your instance. You can specify the following variables in the `.env` file inside the `packages/server` folder. Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)
## 📖 Documentation
@ -182,6 +169,12 @@ Feel free to ask any questions, raise problems, and request new features in [dis
## 🙌 Contributing
Thanks go to these awesome contributors
<a href="https://github.com/FlowiseAI/Flowise/graphs/contributors">
<img src="https://contrib.rocks/image?repo=FlowiseAI/Flowise" />
</a>
See [contributing guide](CONTRIBUTING.md). Reach out to us at [Discord](https://discord.gg/jbaHfsRVBW) if you have any questions or issues.
[![Star History Chart](https://api.star-history.com/svg?repos=FlowiseAI/Flowise&type=Timeline)](https://star-history.com/#FlowiseAI/Flowise&Date)

36
artillery-load-test.yml Normal file
View File

@ -0,0 +1,36 @@
# npm install -g artillery@latest
# artillery run artillery-load-test.yml
# Refer https://www.artillery.io/docs
config:
target: http://128.128.128.128:3000 # replace with your url
phases:
- duration: 1
arrivalRate: 1
rampTo: 2
name: Warm up phase
- duration: 1
arrivalRate: 2
rampTo: 3
name: Ramp up load
- duration: 1
arrivalRate: 3
name: Sustained peak load
scenarios:
- flow:
- loop:
- post:
url: '/api/v1/prediction/chatflow-id' # replace with your chatflowid
json:
question: 'hello' # replace with your question
count: 1 # how many request each user make
# User __
# 3 /
# 2 /
# 1 _/
# 1 2 3
# Seconds
# Total Users = 2 + 3 + 3 = 8
# Each making 1 HTTP call
# Over a duration of 3 seconds

View File

@ -1,11 +1,22 @@
PORT=3000
PASSPHRASE=MYPASSPHRASE # Passphrase used to create encryption key
DATABASE_PATH=/root/.flowise
APIKEY_PATH=/root/.flowise
SECRETKEY_PATH=/root/.flowise
LOG_PATH=/root/.flowise/logs
# DATABASE_TYPE=postgres
# DATABASE_PORT=""
# DATABASE_HOST=""
# DATABASE_NAME="flowise"
# DATABASE_USER=""
# DATABASE_PASSWORD=""
# OVERRIDE_DATABASE=true
# FLOWISE_USERNAME=user
# FLOWISE_PASSWORD=1234
# DEBUG=true
# DATABASE_PATH=/your/database/path/.flowise
# APIKEY_PATH=/your/api/key/path
# SECRETKEY_PATH=/your/secret/key/path
# LOG_PATH=/your/log/path
# LOG_LEVEL=debug (error | warn | info | verbose | debug)
# EXECUTION_MODE=main (child | main)
# TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
# TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash

View File

@ -9,7 +9,7 @@ Starts Flowise from [DockerHub Image](https://hub.docker.com/repository/docker/f
3. Open [http://localhost:3000](http://localhost:3000)
4. You can bring the containers down by `docker-compose stop`
## With Authrorization
## 🔒 Authentication
1. Create `.env` file and specify the `PORT`, `FLOWISE_USERNAME`, and `FLOWISE_PASSWORD` (refer to `.env.example`)
2. Pass `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `docker-compose.yml` file:
@ -22,3 +22,14 @@ Starts Flowise from [DockerHub Image](https://hub.docker.com/repository/docker/f
3. `docker-compose up -d`
4. Open [http://localhost:3000](http://localhost:3000)
5. You can bring the containers down by `docker-compose stop`
## 🌱 Env Variables
If you would like to persist your data (flows, logs, apikeys, credentials), set these variables in the `.env` file inside the `docker` folder:
- DATABASE_PATH=/root/.flowise
- APIKEY_PATH=/root/.flowise
- LOG_PATH=/root/.flowise/logs
- SECRETKEY_PATH=/root/.flowise
Flowise also supports different environment variables to configure your instance. Read [more](https://docs.flowiseai.com/environment-variables)

View File

@ -9,13 +9,13 @@ services:
- PASSPHRASE=${PASSPHRASE}
- FLOWISE_USERNAME=${FLOWISE_USERNAME}
- FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
- DEBUG=${DEBUG}
- DATABASE_PATH=${DATABASE_PATH}
- APIKEY_PATH=${APIKEY_PATH}
- SECRETKEY_PATH=${SECRETKEY_PATH}
- LOG_LEVEL=${LOG_LEVEL}
- LOG_PATH=${LOG_PATH}
- EXECUTION_MODE=${EXECUTION_MODE}
- DEBUG=${DEBUG}
ports:
- '${PORT}:${PORT}'
volumes:

BIN
images/flowise.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 524 KiB

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.2.15",
"version": "1.2.16",
"private": true,
"homepage": "https://flowiseai.com",
"workspaces": [
@ -28,7 +28,6 @@
"*.{js,jsx,ts,tsx,json,md}": "eslint --fix"
},
"devDependencies": {
"turbo": "1.7.4",
"@babel/preset-env": "^7.19.4",
"@babel/preset-typescript": "7.18.6",
"@types/express": "^4.17.13",
@ -48,6 +47,7 @@
"pretty-quick": "^3.1.3",
"rimraf": "^3.0.2",
"run-script-os": "^1.1.6",
"turbo": "1.7.4",
"typescript": "^4.8.4"
},
"engines": {

View File

@ -0,0 +1,25 @@
import { INodeParams, INodeCredential } from '../src/Interface'
class AirtableApi implements INodeCredential {
label: string
name: string
description: string
inputs: INodeParams[]
constructor() {
this.label = 'Airtable API'
this.name = 'airtableApi'
this.description =
'Refer to <a target="_blank" href="https://support.airtable.com/docs/creating-and-using-api-keys-and-access-tokens">official guide</a> on how to get accessToken on Airtable'
this.inputs = [
{
label: 'Access Token',
name: 'accessToken',
type: 'password',
placeholder: '<AIRTABLE_ACCESS_TOKEN>'
}
]
}
}
module.exports = { credClass: AirtableApi }

View File

@ -0,0 +1,22 @@
import { INodeParams, INodeCredential } from '../src/Interface'
/**
 * Credential definition for the Brave Search API.
 * Exposes a single masked API-key input.
 *
 * Fix: removed the `description: string` field declaration — it was never
 * assigned in the constructor (a definite-assignment error under strict
 * mode) and the property never existed at runtime.
 */
class BraveSearchApi implements INodeCredential {
    label: string
    name: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'Brave Search API'
        this.name = 'braveSearchApi'
        this.inputs = [
            {
                label: 'BraveSearch Api Key',
                name: 'braveApiKey',
                type: 'password'
            }
        ]
    }
}
module.exports = { credClass: BraveSearchApi }

View File

@ -0,0 +1,21 @@
import { INodeParams, INodeCredential } from '../src/Interface'
/**
 * Credential definition for the Replicate API.
 * Exposes a single masked API-key input.
 */
class ReplicateApi implements INodeCredential {
    label = 'Replicate API'
    name = 'replicateApi'
    inputs: INodeParams[] = [
        {
            label: 'Replicate Api Key',
            name: 'replicateApiKey',
            type: 'password'
        }
    ]
}
module.exports = { credClass: ReplicateApi }

View File

@ -0,0 +1,29 @@
import { INodeParams, INodeCredential } from '../src/Interface'
/**
 * Credential definition for SingleStore.
 * Exposes a plain username input and a masked password input.
 *
 * Fix: removed the `description: string` field declaration — it was never
 * assigned in the constructor (a definite-assignment error under strict
 * mode) and the property never existed at runtime.
 */
class SingleStoreApi implements INodeCredential {
    label: string
    name: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'SingleStore API'
        this.name = 'singleStoreApi'
        this.inputs = [
            {
                label: 'User',
                name: 'user',
                type: 'string',
                placeholder: '<SINGLESTORE_USERNAME>'
            },
            {
                label: 'Password',
                name: 'password',
                type: 'password',
                placeholder: '<SINGLESTORE_PASSWORD>'
            }
        ]
    }
}
module.exports = { credClass: SingleStoreApi }

View File

@ -0,0 +1,231 @@
import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
import { AgentExecutor } from 'langchain/agents'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { LoadPyodide, finalSystemPrompt, systemPrompt } from './core'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
import axios from 'axios'
/**
 * Airtable Agent node — answers natural-language questions about an Airtable table.
 *
 * Flow in run(): download the table records via the Airtable REST API,
 * give the LLM the dataframe's column/dtype dictionary so it generates
 * pandas code, execute that code inside an in-process Pyodide (Python/WASM)
 * runtime, then have the LLM rephrase the raw result as a standalone answer.
 *
 * Fixes: description typo ("used to to answer"), mismatched example base id
 * in the Base Id input description, and "follwoing" typo in the error message.
 */
class Airtable_Agents implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'Airtable Agent'
        this.name = 'airtableAgent'
        this.type = 'AgentExecutor'
        this.category = 'Agents'
        this.icon = 'airtable.svg'
        this.description = 'Agent used to answer queries on Airtable table'
        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['airtableApi']
        }
        this.inputs = [
            {
                label: 'Language Model',
                name: 'model',
                type: 'BaseLanguageModel'
            },
            {
                label: 'Base Id',
                name: 'baseId',
                type: 'string',
                placeholder: 'app11RobdGoX0YNsC',
                description:
                    'If your table URL looks like: https://airtable.com/app11RobdGoX0YNsC/tblJdmvbrgizbYICO/viw9UrP77Id0CE4ee, app11RobdGoX0YNsC is the base id'
            },
            {
                label: 'Table Id',
                name: 'tableId',
                type: 'string',
                placeholder: 'tblJdmvbrgizbYICO',
                description:
                    'If your table URL looks like: https://airtable.com/app11RobdGoX0YNsC/tblJdmvbrgizbYICO/viw9UrP77Id0CE4ee, tblJdmvbrgizbYICO is the table id'
            },
            {
                label: 'Return All',
                name: 'returnAll',
                type: 'boolean',
                default: true,
                additionalParams: true,
                description: 'If all results should be returned or only up to a given limit'
            },
            {
                label: 'Limit',
                name: 'limit',
                type: 'number',
                default: 100,
                step: 1,
                additionalParams: true,
                description: 'Number of results to return'
            }
        ]
    }

    /** Not used for agent nodes — all of the work happens in run(). */
    async init(): Promise<any> {
        return undefined
    }

    /**
     * Executes the agent.
     * @param nodeData node inputs (model, baseId, tableId, returnAll, limit) and credential
     * @param input the user's question
     * @param options runtime context (logger, socket.io handle/client id for streaming)
     * @returns the final rephrased answer, or the generated python code if execution produced no result
     * @throws Error when the dataframe cannot be built or the generated code fails to run
     */
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
        const model = nodeData.inputs?.model as BaseLanguageModel
        const baseId = nodeData.inputs?.baseId as string
        const tableId = nodeData.inputs?.tableId as string
        const returnAll = nodeData.inputs?.returnAll as boolean
        const limit = nodeData.inputs?.limit as string

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const accessToken = getCredentialParam('accessToken', credentialData, nodeData)

        let airtableData: ICommonObject[] = []
        if (returnAll) {
            airtableData = await loadAll(baseId, tableId, accessToken)
        } else {
            airtableData = await loadLimit(limit ? parseInt(limit, 10) : 100, baseId, tableId, accessToken)
        }

        // Records are shipped into the Python runtime as base64-encoded JSON.
        let base64String = Buffer.from(JSON.stringify(airtableData)).toString('base64')

        const loggerHandler = new ConsoleCallbackHandler(options.logger)
        const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)

        const pyodide = await LoadPyodide()

        // First load the data and get the dataframe dictionary of column types
        // For example using titanic.csv: {'PassengerId': 'int64', 'Survived': 'int64', 'Pclass': 'int64', 'Name': 'object', 'Sex': 'object', 'Age': 'float64', 'SibSp': 'int64', 'Parch': 'int64', 'Ticket': 'object', 'Fare': 'float64', 'Cabin': 'object', 'Embarked': 'object'}
        let dataframeColDict = ''
        try {
            const code = `import pandas as pd
import base64
import json
base64_string = "${base64String}"
decoded_data = base64.b64decode(base64_string)
json_data = json.loads(decoded_data)
df = pd.DataFrame(json_data)
my_dict = df.dtypes.astype(str).to_dict()
print(my_dict)
json.dumps(my_dict)`
            dataframeColDict = await pyodide.runPythonAsync(code)
        } catch (error) {
            throw new Error(error)
        }

        // Then tell GPT to come out with ONLY python code
        // For example: len(df), df[df['SibSp'] > 3]['PassengerId'].count()
        let pythonCode = ''
        if (dataframeColDict) {
            const chain = new LLMChain({
                llm: model,
                prompt: PromptTemplate.fromTemplate(systemPrompt),
                verbose: process.env.DEBUG === 'true' ? true : false
            })
            const inputs = {
                dict: dataframeColDict,
                question: input
            }
            const res = await chain.call(inputs, [loggerHandler])
            pythonCode = res?.text
        }

        // Then run the generated code using Pyodide
        let finalResult = ''
        if (pythonCode) {
            try {
                const code = `import pandas as pd\n${pythonCode}`
                finalResult = await pyodide.runPythonAsync(code)
            } catch (error) {
                // typo fix: message previously read "follwoing"
                throw new Error(`Sorry, I'm unable to find answer for question: "${input}" using following code: "${pythonCode}"`)
            }
        }

        // Finally, have the LLM turn the raw result into a complete answer;
        // include the socket handler when a client connection is available.
        if (finalResult) {
            const chain = new LLMChain({
                llm: model,
                prompt: PromptTemplate.fromTemplate(finalSystemPrompt),
                verbose: process.env.DEBUG === 'true' ? true : false
            })
            const inputs = {
                question: input,
                answer: finalResult
            }
            if (options.socketIO && options.socketIOClientId) {
                const result = await chain.call(inputs, [loggerHandler, handler])
                return result?.text
            } else {
                const result = await chain.call(inputs, [loggerHandler])
                return result?.text
            }
        }
        // No executable result: fall back to returning the generated code itself.
        return pythonCode
    }
}
// One page of the Airtable "list records" REST response; the loader keeps
// paging while `offset` is set (see loadAll below).
interface AirtableLoaderResponse {
records: AirtableLoaderPage[]
offset?: string
}
// A single Airtable record as returned by the REST API.
interface AirtableLoaderPage {
id: string
createdTime: string
fields: ICommonObject
}
/**
 * Fetches one page of records from the Airtable REST API.
 * @param url the list-records endpoint for a given base/table
 * @param params query parameters (pageSize, offset, maxRecords, ...)
 * @param accessToken Airtable personal access token, sent as a Bearer token
 * @throws Error wrapping any axios failure with the failing URL
 */
const fetchAirtableData = async (url: string, params: ICommonObject, accessToken: string): Promise<AirtableLoaderResponse> => {
    const headers = {
        Authorization: `Bearer ${accessToken}`,
        'Content-Type': 'application/json',
        Accept: 'application/json'
    }
    try {
        const { data } = await axios.get(url, { params, headers })
        return data
    } catch (error) {
        throw new Error(`Failed to fetch ${url} from Airtable: ${error}`)
    }
}
/**
 * Loads ALL records of a table, following Airtable's offset-based pagination
 * (100 records per page; a response without `offset` is the last page).
 *
 * BUG FIX: the original returned `data.records.map(...)` — i.e. only the
 * LAST page — while the accumulated `returnPages` array was discarded.
 * Now all accumulated pages are returned.
 */
const loadAll = async (baseId: string, tableId: string, accessToken: string): Promise<ICommonObject[]> => {
    const params: ICommonObject = { pageSize: 100 }
    let data: AirtableLoaderResponse
    const returnPages: AirtableLoaderPage[] = []
    do {
        data = await fetchAirtableData(`https://api.airtable.com/v0/${baseId}/${tableId}`, params, accessToken)
        returnPages.push(...data.records)
        params.offset = data.offset
    } while (data.offset !== undefined)
    return returnPages.map((page) => page.fields)
}
/**
 * Loads at most `limit` records of a table in a single request, using
 * Airtable's `maxRecords` query parameter.
 */
const loadLimit = async (limit: number, baseId: string, tableId: string, accessToken: string): Promise<ICommonObject[]> => {
    const response = await fetchAirtableData(`https://api.airtable.com/v0/${baseId}/${tableId}`, { maxRecords: limit }, accessToken)
    const records = response.records
    if (!records.length) {
        return []
    }
    return records.map((record) => record.fields)
}
module.exports = { nodeClass: Airtable_Agents }

View File

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="256px" height="215px" viewBox="0 0 256 215" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
<g>
<path d="M114.25873,2.70101695 L18.8604023,42.1756384 C13.5552723,44.3711638 13.6102328,51.9065311 18.9486282,54.0225085 L114.746142,92.0117514 C123.163769,95.3498757 132.537419,95.3498757 140.9536,92.0117514 L236.75256,54.0225085 C242.08951,51.9065311 242.145916,44.3711638 236.83934,42.1756384 L141.442459,2.70101695 C132.738459,-0.900338983 122.961284,-0.900338983 114.25873,2.70101695" fill="#FFBF00"></path>
<path d="M136.349071,112.756863 L136.349071,207.659101 C136.349071,212.173089 140.900664,215.263892 145.096461,213.600615 L251.844122,172.166219 C254.281184,171.200072 255.879376,168.845451 255.879376,166.224705 L255.879376,71.3224678 C255.879376,66.8084791 251.327783,63.7176768 247.131986,65.3809537 L140.384325,106.815349 C137.94871,107.781496 136.349071,110.136118 136.349071,112.756863" fill="#26B5F8"></path>
<path d="M111.422771,117.65355 L79.742409,132.949912 L76.5257763,134.504714 L9.65047684,166.548104 C5.4112904,168.593211 0.000578531073,165.503855 0.000578531073,160.794612 L0.000578531073,71.7210757 C0.000578531073,70.0173017 0.874160452,68.5463864 2.04568588,67.4384994 C2.53454463,66.9481944 3.08848814,66.5446689 3.66412655,66.2250305 C5.26231864,65.2661153 7.54173107,65.0101153 9.47981017,65.7766689 L110.890522,105.957098 C116.045234,108.002206 116.450206,115.225166 111.422771,117.65355" fill="#ED3049"></path>
<path d="M111.422771,117.65355 L79.742409,132.949912 L2.04568588,67.4384994 C2.53454463,66.9481944 3.08848814,66.5446689 3.66412655,66.2250305 C5.26231864,65.2661153 7.54173107,65.0101153 9.47981017,65.7766689 L110.890522,105.957098 C116.045234,108.002206 116.450206,115.225166 111.422771,117.65355" fill-opacity="0.25" fill="#000000"></path>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.9 KiB

View File

@ -0,0 +1,29 @@
import type { PyodideInterface } from 'pyodide'
import * as path from 'path'
import { getUserHome } from '../../../src/utils'
// Module-level cache: the Pyodide WASM runtime is expensive to initialise,
// so it is created once per process and reused.
let pyodideInstance: PyodideInterface | undefined

/**
 * Lazily loads the Pyodide (Python-in-WASM) runtime together with the
 * pandas and numpy packages, caching wheels under ~/.flowise/pyodideCacheDir.
 * @returns the shared PyodideInterface instance
 */
export async function LoadPyodide(): Promise<PyodideInterface> {
    if (!pyodideInstance) {
        const { loadPyodide } = await import('pyodide')
        const options: any = { packageCacheDir: path.join(getUserHome(), '.flowise', 'pyodideCacheDir') }
        pyodideInstance = await loadPyodide(options)
        await pyodideInstance.loadPackage(['pandas', 'numpy'])
    }
    return pyodideInstance
}
// Prompt asking the LLM to emit ONLY pandas code answering the user's
// question; {dict} is the dataframe's column -> dtype mapping.
export const systemPrompt = `You are working with a pandas dataframe in Python. The name of the dataframe is df.
The columns and data types of a dataframe are given below as a Python dictionary with keys showing column names and values showing the data types.
{dict}
I will ask question, and you will output the Python code using pandas dataframe to answer my question. Do not provide any explanations. Do not respond with anything except the output of the code.
Question: {question}
Output Code:`
// Prompt rephrasing the raw execution result {answer} into a standalone
// natural-language answer to {question}.
export const finalSystemPrompt = `You are given the question: {question}. You have an answer to the question: {answer}. Rephrase the answer into a standalone answer.
Standalone Answer:`

View File

@ -0,0 +1,149 @@
import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface'
import { AgentExecutor } from 'langchain/agents'
import { getBaseClasses } from '../../../src/utils'
import { LoadPyodide, finalSystemPrompt, systemPrompt } from './core'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
/**
 * CSV Agent node — answers natural-language questions about an uploaded CSV file.
 *
 * Flow in run(): decode the uploaded CSV (base64 data-URI), give the LLM
 * the dataframe's column/dtype dictionary so it generates pandas code,
 * execute that code inside an in-process Pyodide (Python/WASM) runtime,
 * then have the LLM rephrase the raw result as a standalone answer.
 *
 * Fixes: description typo ("used to to answer") and "follwoing" typo in
 * the error message.
 */
class CSV_Agents implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'CSV Agent'
        this.name = 'csvAgent'
        this.type = 'AgentExecutor'
        this.category = 'Agents'
        this.icon = 'csvagent.png'
        this.description = 'Agent used to answer queries on CSV data'
        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
        this.inputs = [
            {
                label: 'Csv File',
                name: 'csvFile',
                type: 'file',
                fileType: '.csv'
            },
            {
                label: 'Language Model',
                name: 'model',
                type: 'BaseLanguageModel'
            }
        ]
    }

    /** Not used for agent nodes — all of the work happens in run(). */
    async init(): Promise<any> {
        return undefined
    }

    /**
     * Executes the agent.
     * @param nodeData node inputs (csvFile as base64 data-URI(s), model)
     * @param input the user's question
     * @param options runtime context (logger, socket.io handle/client id for streaming)
     * @returns the final rephrased answer, or the generated python code if execution produced no result
     * @throws Error when the dataframe cannot be built or the generated code fails to run
     */
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
        const csvFileBase64 = nodeData.inputs?.csvFile as string
        const model = nodeData.inputs?.model as BaseLanguageModel

        const loggerHandler = new ConsoleCallbackHandler(options.logger)
        const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)

        // The upload is either a single data-URI string or a JSON array of them.
        let files: string[] = []
        if (csvFileBase64.startsWith('[') && csvFileBase64.endsWith(']')) {
            files = JSON.parse(csvFileBase64)
        } else {
            files = [csvFileBase64]
        }

        // NOTE(review): this loop overwrites base64String on every iteration,
        // so when several files are uploaded only the LAST one is analysed —
        // confirm whether multi-file support is intended. The double pop()
        // presumably strips a trailing ',filename:…' segment before taking
        // the base64 payload — verify against the upload format.
        let base64String = ''
        for (const file of files) {
            const splitDataURI = file.split(',')
            splitDataURI.pop()
            base64String = splitDataURI.pop() ?? ''
        }

        const pyodide = await LoadPyodide()

        // First load the csv file and get the dataframe dictionary of column types
        // For example using titanic.csv: {'PassengerId': 'int64', 'Survived': 'int64', 'Pclass': 'int64', 'Name': 'object', 'Sex': 'object', 'Age': 'float64', 'SibSp': 'int64', 'Parch': 'int64', 'Ticket': 'object', 'Fare': 'float64', 'Cabin': 'object', 'Embarked': 'object'}
        let dataframeColDict = ''
        try {
            const code = `import pandas as pd
import base64
from io import StringIO
import json
base64_string = "${base64String}"
decoded_data = base64.b64decode(base64_string)
csv_data = StringIO(decoded_data.decode('utf-8'))
df = pd.read_csv(csv_data)
my_dict = df.dtypes.astype(str).to_dict()
print(my_dict)
json.dumps(my_dict)`
            dataframeColDict = await pyodide.runPythonAsync(code)
        } catch (error) {
            throw new Error(error)
        }

        // Then tell GPT to come out with ONLY python code
        // For example: len(df), df[df['SibSp'] > 3]['PassengerId'].count()
        let pythonCode = ''
        if (dataframeColDict) {
            const chain = new LLMChain({
                llm: model,
                prompt: PromptTemplate.fromTemplate(systemPrompt),
                verbose: process.env.DEBUG === 'true' ? true : false
            })
            const inputs = {
                dict: dataframeColDict,
                question: input
            }
            const res = await chain.call(inputs, [loggerHandler])
            pythonCode = res?.text
        }

        // Then run the generated code using Pyodide
        let finalResult = ''
        if (pythonCode) {
            try {
                const code = `import pandas as pd\n${pythonCode}`
                finalResult = await pyodide.runPythonAsync(code)
            } catch (error) {
                // typo fix: message previously read "follwoing"
                throw new Error(`Sorry, I'm unable to find answer for question: "${input}" using following code: "${pythonCode}"`)
            }
        }

        // Finally, have the LLM turn the raw result into a complete answer;
        // include the socket handler when a client connection is available.
        if (finalResult) {
            const chain = new LLMChain({
                llm: model,
                prompt: PromptTemplate.fromTemplate(finalSystemPrompt),
                verbose: process.env.DEBUG === 'true' ? true : false
            })
            const inputs = {
                question: input,
                answer: finalResult
            }
            if (options.socketIO && options.socketIOClientId) {
                const result = await chain.call(inputs, [loggerHandler, handler])
                return result?.text
            } else {
                const result = await chain.call(inputs, [loggerHandler])
                return result?.text
            }
        }
        // No executable result: fall back to returning the generated code itself.
        return pythonCode
    }
}
module.exports = { nodeClass: CSV_Agents }

View File

@ -0,0 +1,29 @@
import type { PyodideInterface } from 'pyodide'
import * as path from 'path'
import { getUserHome } from '../../../src/utils'
// Module-level cache: the Pyodide WASM runtime is expensive to initialise,
// so it is created once per process and reused.
let pyodideInstance: PyodideInterface | undefined
/**
 * Lazily loads the Pyodide (Python-in-WASM) runtime together with the
 * pandas and numpy packages, caching wheels under ~/.flowise/pyodideCacheDir.
 * @returns the shared PyodideInterface instance
 */
export async function LoadPyodide(): Promise<PyodideInterface> {
if (pyodideInstance === undefined) {
const { loadPyodide } = await import('pyodide')
const obj: any = { packageCacheDir: path.join(getUserHome(), '.flowise', 'pyodideCacheDir') }
pyodideInstance = await loadPyodide(obj)
await pyodideInstance.loadPackage(['pandas', 'numpy'])
}
return pyodideInstance
}
// Prompt asking the LLM to emit ONLY pandas code answering the user's
// question; {dict} is the dataframe's column -> dtype mapping.
export const systemPrompt = `You are working with a pandas dataframe in Python. The name of the dataframe is df.
The columns and data types of a dataframe are given below as a Python dictionary with keys showing column names and values showing the data types.
{dict}
I will ask question, and you will output the Python code using pandas dataframe to answer my question. Do not provide any explanations. Do not respond with anything except the output of the code.
Question: {question}
Output Code:`
// Prompt rephrasing the raw execution result {answer} into a standalone
// natural-language answer to {question}.
export const finalSystemPrompt = `You are given the question: {question}. You have an answer to the question: {answer}. Rephrase the answer into a standalone answer.
Standalone Answer:`

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

@ -22,7 +22,7 @@ class OpenAIFunctionAgent_Agents implements INode {
this.name = 'openAIFunctionAgent'
this.type = 'AgentExecutor'
this.category = 'Agents'
this.icon = 'openai.svg'
this.icon = 'openai.png'
this.description = `An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call`
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
this.inputs = [

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.9 KiB

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0,0,256,256" width="96px" height="96px" fill-rule="nonzero"><g fill="#00a67e" fill-rule="nonzero" stroke="none" stroke-width="1" stroke-linecap="butt" stroke-linejoin="miter" stroke-miterlimit="10" stroke-dasharray="" stroke-dashoffset="0" font-family="none" font-weight="none" font-size="none" text-anchor="none" style="mix-blend-mode: normal"><g transform="scale(10.66667,10.66667)"><path d="M11.13477,1.01758c-0.26304,-0.01259 -0.528,-0.00875 -0.79687,0.01563c-2.22436,0.20069 -4.00167,1.76087 -4.72852,3.78711c-1.71233,0.35652 -3.1721,1.48454 -3.9375,3.13672c-0.93789,2.02702 -0.47459,4.34373 0.91602,5.98633c-0.54763,1.66186 -0.30227,3.49111 0.74414,4.97852c1.28618,1.82589 3.52454,2.58282 5.64258,2.19922c1.16505,1.30652 2.87295,2.00936 4.6875,1.8457c2.22441,-0.2007 4.0017,-1.7608 4.72852,-3.78711c1.71235,-0.35654 3.17321,-1.4837 3.93945,-3.13672c0.93809,-2.0267 0.47529,-4.34583 -0.91602,-5.98828c0.54663,-1.66125 0.29983,-3.48985 -0.74609,-4.97656c-1.28618,-1.82589 -3.52454,-2.58282 -5.64258,-2.19922c-0.99242,-1.11291 -2.37852,-1.78893 -3.89062,-1.86133zM11.02539,2.51367c0.89653,0.03518 1.7296,0.36092 2.40625,0.9082c-0.11306,0.05604 -0.23154,0.09454 -0.3418,0.1582l-4.01367,2.31641c-0.306,0.176 -0.496,0.50247 -0.5,0.85547l-0.05859,5.48633l-1.76758,-1.04883v-4.4043c0,-2.136 1.55759,-4.04291 3.68359,-4.25391c0.19937,-0.01975 0.39689,-0.02523 0.5918,-0.01758zM16.125,4.25586c1.27358,0.00756 2.51484,0.5693 3.29297,1.6543c0.65289,0.90943 0.89227,1.99184 0.72852,3.03711c-0.10507,-0.06991 -0.19832,-0.15312 -0.30859,-0.2168l-4.01172,-2.31641c-0.306,-0.176 -0.68224,-0.17886 -0.99023,-0.00586l-4.7832,2.69531l0.02344,-2.05469l3.81445,-2.20117c0.69375,-0.4005 1.47022,-0.59633 2.23438,-0.5918zM5.2832,6.47266c-0.008,0.12587 -0.0332,0.24774 -0.0332,0.375v4.63281c0,0.353 0.18623,0.67938 0.49023,0.85938l4.72461,2.79688l-1.79102,1.00586l-3.81445,-2.20312c-1.85,-1.068 -2.7228,-3.37236 
-1.8418,-5.31836c0.46198,-1.02041 1.27879,-1.76751 2.26562,-2.14844zM15.32617,7.85742l3.81445,2.20313c1.85,1.068 2.72475,3.37236 1.84375,5.31836c-0.46209,1.02065 -1.28043,1.7676 -2.26758,2.14844c0.00797,-0.12565 0.0332,-0.24797 0.0332,-0.375v-4.63086c0,-0.354 -0.18623,-0.68133 -0.49023,-0.86133l-4.72461,-2.79687zM12.02539,9.71094l1.96875,1.16797l-0.02734,2.28906l-1.99219,1.11914l-1.96875,-1.16601l0.02539,-2.28906zM15.48242,11.76172l1.76758,1.04883v4.4043c0,2.136 -1.55759,4.04291 -3.68359,4.25391c-1.11644,0.11059 -2.17429,-0.22435 -2.99805,-0.89062c0.11306,-0.05604 0.23154,-0.09454 0.3418,-0.1582l4.01367,-2.31641c0.306,-0.176 0.496,-0.50247 0.5,-0.85547zM13.94727,14.89648l-0.02344,2.05469l-3.81445,2.20117c-1.85,1.068 -4.28234,0.6735 -5.52734,-1.0625c-0.65289,-0.90943 -0.89227,-1.99184 -0.72852,-3.03711c0.10521,0.07006 0.19816,0.15299 0.30859,0.2168l4.01172,2.31641c0.306,0.176 0.68223,0.17886 0.99023,0.00586z"></path></g></g></svg>

Before

Width:  |  Height:  |  Size: 2.9 KiB

View File

@ -1,32 +1,21 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ConversationalRetrievalQAChain } from 'langchain/chains'
import { AIMessage, BaseRetriever, HumanMessage } from 'langchain/schema'
import { BaseChatMemory, BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { ConversationalRetrievalQAChain, QAChainParams } from 'langchain/chains'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { BaseRetriever } from 'langchain/schema/retriever'
import { BaseChatMemory, BufferMemory, ChatMessageHistory, BufferMemoryInput } from 'langchain/memory'
import { PromptTemplate } from 'langchain/prompts'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
const default_qa_template = `Use the following pieces of context to answer the question at the end, in its original language. If you don't know the answer, just say that you don't know in its original language, don't try to make up an answer.
{context}
Question: {question}
Helpful Answer:`
const qa_template = `Use the following pieces of context to answer the question at the end, in its original language.
{context}
Question: {question}
Helpful Answer:`
const CUSTOM_QUESTION_GENERATOR_CHAIN_PROMPT = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language. include it in the standalone question.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:`
import {
default_map_reduce_template,
default_qa_template,
qa_template,
map_reduce_template,
CUSTOM_QUESTION_GENERATOR_CHAIN_PROMPT,
refine_question_template,
refine_template
} from './prompts'
class ConversationalRetrievalQAChain_Chains implements INode {
label: string
@ -60,9 +49,9 @@ class ConversationalRetrievalQAChain_Chains implements INode {
{
label: 'Memory',
name: 'memory',
type: 'DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory',
type: 'BaseMemory',
optional: true,
description: 'If no memory connected, default BufferMemory will be used'
description: 'If left empty, a default BufferMemory will be used'
},
{
label: 'Return Source Documents',
@ -118,28 +107,54 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const obj: any = {
verbose: process.env.DEBUG === 'true' ? true : false,
qaChainOptions: {
type: 'stuff',
prompt: PromptTemplate.fromTemplate(systemMessagePrompt ? `${systemMessagePrompt}\n${qa_template}` : default_qa_template)
},
questionGeneratorChainOptions: {
template: CUSTOM_QUESTION_GENERATOR_CHAIN_PROMPT
}
}
if (returnSourceDocuments) obj.returnSourceDocuments = returnSourceDocuments
if (chainOption) obj.qaChainOptions = { ...obj.qaChainOptions, type: chainOption }
if (chainOption === 'map_reduce') {
obj.qaChainOptions = {
type: 'map_reduce',
combinePrompt: PromptTemplate.fromTemplate(
systemMessagePrompt ? `${systemMessagePrompt}\n${map_reduce_template}` : default_map_reduce_template
)
} as QAChainParams
} else if (chainOption === 'refine') {
const qprompt = new PromptTemplate({
inputVariables: ['context', 'question'],
template: refine_question_template(systemMessagePrompt)
})
const rprompt = new PromptTemplate({
inputVariables: ['context', 'question', 'existing_answer'],
template: refine_template
})
obj.qaChainOptions = {
type: 'refine',
questionPrompt: qprompt,
refinePrompt: rprompt
} as QAChainParams
} else {
obj.qaChainOptions = {
type: 'stuff',
prompt: PromptTemplate.fromTemplate(systemMessagePrompt ? `${systemMessagePrompt}\n${qa_template}` : default_qa_template)
} as QAChainParams
}
if (memory) {
memory.inputKey = 'question'
memory.outputKey = 'text'
memory.memoryKey = 'chat_history'
if (chainOption === 'refine') memory.outputKey = 'output_text'
else memory.outputKey = 'text'
obj.memory = memory
} else {
obj.memory = new BufferMemory({
const fields: BufferMemoryInput = {
memoryKey: 'chat_history',
inputKey: 'question',
outputKey: 'text',
returnMessages: true
})
}
if (chainOption === 'refine') fields.outputKey = 'output_text'
else fields.outputKey = 'text'
obj.memory = new BufferMemory(fields)
}
const chain = ConversationalRetrievalQAChain.fromLLM(model, vectorStoreRetriever, obj)
@ -150,6 +165,7 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const chain = nodeData.instance as ConversationalRetrievalQAChain
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean
const memory = nodeData.inputs?.memory
const chainOption = nodeData.inputs?.chainOption as string
let model = nodeData.inputs?.model
@ -179,8 +195,22 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const loggerHandler = new ConsoleCallbackHandler(options.logger)
if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, undefined, returnSourceDocuments)
const handler = new CustomChainHandler(
options.socketIO,
options.socketIOClientId,
chainOption === 'refine' ? 4 : undefined,
returnSourceDocuments
)
const res = await chain.call(obj, [loggerHandler, handler])
if (chainOption === 'refine') {
if (res.output_text && res.sourceDocuments) {
return {
text: res.output_text,
sourceDocuments: res.sourceDocuments
}
}
return res?.output_text
}
if (res.text && res.sourceDocuments) return res
return res?.text
} else {

View File

@ -0,0 +1,64 @@
// Default "stuff"-chain QA prompt, used when the user supplies no system
// message prompt: instructs the model to admit when it does not know
// rather than invent an answer.
export const default_qa_template = `Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.
{context}
Question: {question}
Helpful Answer:`

// "stuff"-chain QA prompt used when the caller prepends its own system
// message prompt ahead of this template.
export const qa_template = `Use the following pieces of context to answer the question at the end.
{context}
Question: {question}
Helpful Answer:`

// Default combine prompt for the map_reduce QA chain; {summaries} holds the
// intermediate per-chunk answers being combined into a final answer.
export const default_map_reduce_template = `Given the following extracted parts of a long document and a question, create a final answer.
If you don't know the answer, just say that you don't know. Don't try to make up an answer.
{summaries}
Question: {question}
Helpful Answer:`

// map_reduce combine prompt used when the caller prepends its own system
// message prompt ahead of this template.
export const map_reduce_template = `Given the following extracted parts of a long document and a question, create a final answer.
{summaries}
Question: {question}
Helpful Answer:`
/**
 * Builds the question prompt for the `refine` QA chain.
 *
 * When a system prompt is supplied it is embedded verbatim into the
 * instruction line; otherwise a generic "answer the question" instruction
 * is used. `{context}` and `{question}` are langchain prompt placeholders.
 */
export const refine_question_template = (sysPrompt?: string) => {
    const instruction = sysPrompt
        ? `${sysPrompt}
Answer the question: {question}.
Answer:`
        : `answer the question: {question}.
Answer:`
    return (
        `Context information is below.
---------------------
{context}
---------------------
Given the context information and not prior knowledge, ` + instruction
    )
}
// Refine-chain prompt: asks the model to improve {existing_answer} using the
// additional {context}, and to keep the original answer when the new context
// does not help.
export const refine_template = `The original question is as follows: {question}
We have provided an existing answer: {existing_answer}
We have the opportunity to refine the existing answer (only if needed) with some more context below.
------------
{context}
------------
Given the new context, refine the original answer to better answer the question.
If you can't find answer from the context, return the original answer.`

// Question-generator prompt: condenses {chat_history} plus a follow-up
// question into one standalone question, answered in the same language as
// the follow-up.
export const CUSTOM_QUESTION_GENERATOR_CHAIN_PROMPT = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, answer in the same language as the follow up question. include it in the standalone question.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:`

View File

@ -1,5 +1,5 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
@ -73,7 +73,12 @@ class LLMChain_Chains implements INode {
console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console
console.log(res)
return res
/**
* Apply string transformation to convert special chars:
* FROM: hello i am ben\n\n\thow are you?
* TO: hello i am benFLOWISE_NEWLINEFLOWISE_NEWLINEFLOWISE_TABhow are you?
*/
return handleEscapeCharacters(res, false)
}
}
@ -81,7 +86,6 @@ class LLMChain_Chains implements INode {
const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"]
const chain = nodeData.instance as LLMChain
const promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject
const res = await runPrediction(inputVariables, chain, input, promptValues, options)
// eslint-disable-next-line no-console
console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m')
@ -95,7 +99,7 @@ const runPrediction = async (
inputVariables: string[],
chain: LLMChain,
input: string,
promptValues: ICommonObject,
promptValuesRaw: ICommonObject,
options: ICommonObject
) => {
const loggerHandler = new ConsoleCallbackHandler(options.logger)
@ -103,6 +107,13 @@ const runPrediction = async (
const socketIO = isStreaming ? options.socketIO : undefined
const socketIOClientId = isStreaming ? options.socketIOClientId : ''
/**
* Apply string transformation to reverse converted special chars:
* FROM: { "value": "hello i am benFLOWISE_NEWLINEFLOWISE_NEWLINEFLOWISE_TABhow are you?" }
* TO: { "value": "hello i am ben\n\n\thow are you?" }
*/
const promptValues = handleEscapeCharacters(promptValuesRaw, true)
if (inputVariables.length === 1) {
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)

View File

@ -1,6 +1,6 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { RetrievalQAChain } from 'langchain/chains'
import { BaseRetriever } from 'langchain/schema'
import { BaseRetriever } from 'langchain/schema/retriever'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

View File

@ -17,7 +17,7 @@ class ChatOpenAI_ChatModels implements INode {
this.label = 'ChatOpenAI'
this.name = 'chatOpenAI'
this.type = 'ChatOpenAI'
this.icon = 'openai.svg'
this.icon = 'openai.png'
this.category = 'Chat Models'
this.description = 'Wrapper around OpenAI large language models that use the Chat endpoint'
this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.9 KiB

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0,0,256,256" width="96px" height="96px" fill-rule="nonzero"><g fill="#00a67e" fill-rule="nonzero" stroke="none" stroke-width="1" stroke-linecap="butt" stroke-linejoin="miter" stroke-miterlimit="10" stroke-dasharray="" stroke-dashoffset="0" font-family="none" font-weight="none" font-size="none" text-anchor="none" style="mix-blend-mode: normal"><g transform="scale(10.66667,10.66667)"><path d="M11.13477,1.01758c-0.26304,-0.01259 -0.528,-0.00875 -0.79687,0.01563c-2.22436,0.20069 -4.00167,1.76087 -4.72852,3.78711c-1.71233,0.35652 -3.1721,1.48454 -3.9375,3.13672c-0.93789,2.02702 -0.47459,4.34373 0.91602,5.98633c-0.54763,1.66186 -0.30227,3.49111 0.74414,4.97852c1.28618,1.82589 3.52454,2.58282 5.64258,2.19922c1.16505,1.30652 2.87295,2.00936 4.6875,1.8457c2.22441,-0.2007 4.0017,-1.7608 4.72852,-3.78711c1.71235,-0.35654 3.17321,-1.4837 3.93945,-3.13672c0.93809,-2.0267 0.47529,-4.34583 -0.91602,-5.98828c0.54663,-1.66125 0.29983,-3.48985 -0.74609,-4.97656c-1.28618,-1.82589 -3.52454,-2.58282 -5.64258,-2.19922c-0.99242,-1.11291 -2.37852,-1.78893 -3.89062,-1.86133zM11.02539,2.51367c0.89653,0.03518 1.7296,0.36092 2.40625,0.9082c-0.11306,0.05604 -0.23154,0.09454 -0.3418,0.1582l-4.01367,2.31641c-0.306,0.176 -0.496,0.50247 -0.5,0.85547l-0.05859,5.48633l-1.76758,-1.04883v-4.4043c0,-2.136 1.55759,-4.04291 3.68359,-4.25391c0.19937,-0.01975 0.39689,-0.02523 0.5918,-0.01758zM16.125,4.25586c1.27358,0.00756 2.51484,0.5693 3.29297,1.6543c0.65289,0.90943 0.89227,1.99184 0.72852,3.03711c-0.10507,-0.06991 -0.19832,-0.15312 -0.30859,-0.2168l-4.01172,-2.31641c-0.306,-0.176 -0.68224,-0.17886 -0.99023,-0.00586l-4.7832,2.69531l0.02344,-2.05469l3.81445,-2.20117c0.69375,-0.4005 1.47022,-0.59633 2.23438,-0.5918zM5.2832,6.47266c-0.008,0.12587 -0.0332,0.24774 -0.0332,0.375v4.63281c0,0.353 0.18623,0.67938 0.49023,0.85938l4.72461,2.79688l-1.79102,1.00586l-3.81445,-2.20312c-1.85,-1.068 -2.7228,-3.37236 
-1.8418,-5.31836c0.46198,-1.02041 1.27879,-1.76751 2.26562,-2.14844zM15.32617,7.85742l3.81445,2.20313c1.85,1.068 2.72475,3.37236 1.84375,5.31836c-0.46209,1.02065 -1.28043,1.7676 -2.26758,2.14844c0.00797,-0.12565 0.0332,-0.24797 0.0332,-0.375v-4.63086c0,-0.354 -0.18623,-0.68133 -0.49023,-0.86133l-4.72461,-2.79687zM12.02539,9.71094l1.96875,1.16797l-0.02734,2.28906l-1.99219,1.11914l-1.96875,-1.16601l0.02539,-2.28906zM15.48242,11.76172l1.76758,1.04883v4.4043c0,2.136 -1.55759,4.04291 -3.68359,4.25391c-1.11644,0.11059 -2.17429,-0.22435 -2.99805,-0.89062c0.11306,-0.05604 0.23154,-0.09454 0.3418,-0.1582l4.01367,-2.31641c0.306,-0.176 0.496,-0.50247 0.5,-0.85547zM13.94727,14.89648l-0.02344,2.05469l-3.81445,2.20117c-1.85,1.068 -4.28234,0.6735 -5.52734,-1.0625c-0.65289,-0.90943 -0.89227,-1.99184 -0.72852,-3.03711c0.10521,0.07006 0.19816,0.15299 0.30859,0.2168l4.01172,2.31641c0.306,0.176 0.68223,0.17886 0.99023,0.00586z"></path></g></g></svg>

Before

Width:  |  Height:  |  Size: 2.9 KiB

View File

@ -0,0 +1,198 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { BaseDocumentLoader } from 'langchain/document_loaders/base'
import { Document } from 'langchain/document'
import axios, { AxiosRequestConfig } from 'axios'
class API_DocumentLoaders implements INode {
label: string
name: string
description: string
type: string
icon: string
category: string
baseClasses: string[]
inputs?: INodeParams[]
constructor() {
this.label = 'API Loader'
this.name = 'apiLoader'
this.type = 'Document'
this.icon = 'api-loader.png'
this.category = 'Document Loaders'
this.description = `Load data from an API`
this.baseClasses = [this.type]
this.inputs = [
{
label: 'Text Splitter',
name: 'textSplitter',
type: 'TextSplitter',
optional: true
},
{
label: 'Method',
name: 'method',
type: 'options',
options: [
{
label: 'GET',
name: 'GET'
},
{
label: 'POST',
name: 'POST'
}
]
},
{
label: 'URL',
name: 'url',
type: 'string'
},
{
label: 'Headers',
name: 'headers',
type: 'json',
additionalParams: true,
optional: true
},
{
label: 'Body',
name: 'body',
type: 'json',
description:
'JSON body for the POST request. If not specified, agent will try to figure out itself from AIPlugin if provided',
additionalParams: true,
optional: true
}
]
}
async init(nodeData: INodeData): Promise<any> {
const headers = nodeData.inputs?.headers as string
const url = nodeData.inputs?.url as string
const body = nodeData.inputs?.body as string
const method = nodeData.inputs?.method as string
const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
const metadata = nodeData.inputs?.metadata
const options: ApiLoaderParams = {
url,
method
}
if (headers) {
const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(headers)
options.headers = parsedHeaders
}
if (body) {
const parsedBody = typeof body === 'object' ? body : JSON.parse(body)
options.body = parsedBody
}
const loader = new ApiLoader(options)
let docs = []
if (textSplitter) {
docs = await loader.loadAndSplit(textSplitter)
} else {
docs = await loader.load()
}
if (metadata) {
const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata)
let finaldocs = []
for (const doc of docs) {
const newdoc = {
...doc,
metadata: {
...doc.metadata,
...parsedMetadata
}
}
finaldocs.push(newdoc)
}
return finaldocs
}
return docs
}
}
/** Constructor options for the ApiLoader document loader. */
interface ApiLoaderParams {
    // Endpoint to request.
    url: string
    // HTTP verb; 'POST' triggers a POST, anything else is treated as GET.
    method: string
    // Optional request headers.
    headers?: ICommonObject
    // Optional JSON body (POST only).
    body?: ICommonObject
}
/**
 * Langchain document loader that fetches a URL (GET or POST) with axios and
 * wraps the JSON response body in a single Document whose metadata records
 * the requested URL.
 */
class ApiLoader extends BaseDocumentLoader {
    public readonly url: string

    public readonly headers?: ICommonObject

    public readonly body?: ICommonObject

    public readonly method: string

    constructor({ url, headers, body, method }: ApiLoaderParams) {
        super()
        this.url = url
        this.headers = headers
        this.body = body
        this.method = method
    }

    /**
     * Dispatches to the POST or GET path depending on the configured method.
     * @returns one Document containing the pretty-printed response payload
     */
    public async load(): Promise<Document[]> {
        if (this.method === 'POST') {
            return this.executePostRequest(this.url, this.headers, this.body)
        } else {
            return this.executeGetRequest(this.url, this.headers)
        }
    }

    // Shared response handling for both verbs (previously duplicated in the
    // GET and POST paths): serialize the payload and wrap it in a Document.
    private responseToDocuments(data: unknown, url: string): Document[] {
        const responseJsonString = JSON.stringify(data, null, 2)
        const doc = new Document({
            pageContent: responseJsonString,
            metadata: {
                url
            }
        })
        return [doc]
    }

    /**
     * Issues the GET request.
     * @throws Error wrapping any axios failure (network, HTTP status, ...)
     */
    protected async executeGetRequest(url: string, headers?: ICommonObject): Promise<Document[]> {
        try {
            const config: AxiosRequestConfig = {}
            if (headers) {
                config.headers = headers
            }
            const response = await axios.get(url, config)
            return this.responseToDocuments(response.data, url)
        } catch (error) {
            throw new Error(`Failed to fetch ${url}: ${error}`)
        }
    }

    /**
     * Issues the POST request; an absent body is sent as an empty object.
     * @throws Error wrapping any axios failure (network, HTTP status, ...)
     */
    protected async executePostRequest(url: string, headers?: ICommonObject, body?: ICommonObject): Promise<Document[]> {
        try {
            const config: AxiosRequestConfig = {}
            if (headers) {
                config.headers = headers
            }
            const response = await axios.post(url, body ?? {}, config)
            return this.responseToDocuments(response.data, url)
        } catch (error) {
            throw new Error(`Failed to post ${url}: ${error}`)
        }
    }
}
// Flowise node registration: exported via CommonJS under the `nodeClass` key.
module.exports = {
    nodeClass: API_DocumentLoaders
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -0,0 +1,229 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { BaseDocumentLoader } from 'langchain/document_loaders/base'
import { Document } from 'langchain/document'
import axios from 'axios'
import { getCredentialData, getCredentialParam } from '../../../src/utils'
/**
 * Flowise document-loader node that reads records from an Airtable table
 * (via the Airtable Web API) and returns them as langchain Documents.
 */
class Airtable_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    credential: INodeParams
    inputs?: INodeParams[]

    constructor() {
        this.label = 'Airtable'
        this.name = 'airtable'
        this.type = 'Document'
        this.icon = 'airtable.svg'
        this.category = 'Document Loaders'
        this.description = `Load data from Airtable table`
        this.baseClasses = [this.type]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['airtableApi']
        }
        this.inputs = [
            {
                label: 'Text Splitter',
                name: 'textSplitter',
                type: 'TextSplitter',
                optional: true
            },
            {
                label: 'Base Id',
                name: 'baseId',
                type: 'string',
                placeholder: 'app11RobdGoX0YNsC',
                // Fix: the example id previously read "app11RovdGoX0YNsC" (b/v typo)
                // and did not match the URL quoted in the same sentence.
                description:
                    'If your table URL looks like: https://airtable.com/app11RobdGoX0YNsC/tblJdmvbrgizbYICO/viw9UrP77Id0CE4ee, app11RobdGoX0YNsC is the base id'
            },
            {
                label: 'Table Id',
                name: 'tableId',
                type: 'string',
                placeholder: 'tblJdmvbrgizbYICO',
                description:
                    'If your table URL looks like: https://airtable.com/app11RobdGoX0YNsC/tblJdmvbrgizbYICO/viw9UrP77Id0CE4ee, tblJdmvbrgizbYICO is the table id'
            },
            {
                label: 'Return All',
                name: 'returnAll',
                type: 'boolean',
                default: true,
                additionalParams: true,
                description: 'If all results should be returned or only up to a given limit'
            },
            {
                label: 'Limit',
                name: 'limit',
                type: 'number',
                default: 100,
                step: 1,
                additionalParams: true,
                description: 'Number of results to return'
            },
            {
                label: 'Metadata',
                name: 'metadata',
                type: 'json',
                optional: true,
                additionalParams: true
            }
        ]
    }

    /**
     * Fetches records from the configured table (all records, or up to
     * `limit`), optionally splits them with the provided text splitter, and
     * merges user-supplied metadata into every resulting document.
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const baseId = nodeData.inputs?.baseId as string
        const tableId = nodeData.inputs?.tableId as string
        const returnAll = nodeData.inputs?.returnAll as boolean
        const limit = nodeData.inputs?.limit as string
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
        const metadata = nodeData.inputs?.metadata

        // Resolve the personal access token from the stored credential.
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const accessToken = getCredentialParam('accessToken', credentialData, nodeData)

        const airtableOptions: AirtableLoaderParams = {
            baseId,
            tableId,
            returnAll,
            accessToken,
            limit: limit ? parseInt(limit, 10) : 100
        }
        const loader = new AirtableLoader(airtableOptions)

        let docs = []
        if (textSplitter) {
            docs = await loader.loadAndSplit(textSplitter)
        } else {
            docs = await loader.load()
        }

        if (metadata) {
            const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata)
            const finaldocs = []
            // Overlay the user metadata on top of each document's own metadata.
            for (const doc of docs) {
                const newdoc = {
                    ...doc,
                    metadata: {
                        ...doc.metadata,
                        ...parsedMetadata
                    }
                }
                finaldocs.push(newdoc)
            }
            return finaldocs
        }
        return docs
    }
}
/** Constructor options for the AirtableLoader document loader. */
interface AirtableLoaderParams {
    baseId: string
    tableId: string
    // Airtable personal access token, sent as a Bearer header.
    accessToken: string
    // Maximum number of records when returnAll is false.
    limit?: number
    // When true, paginate through every record and ignore `limit`.
    returnAll?: boolean
}

/** Shape of an Airtable "list records" API response. */
interface AirtableLoaderResponse {
    records: AirtableLoaderPage[]
    // Pagination cursor; present only when more pages remain.
    offset?: string
}

/** A single record as returned by the Airtable API. */
interface AirtableLoaderPage {
    id: string
    createdTime: string
    fields: ICommonObject
}
class AirtableLoader extends BaseDocumentLoader {
public readonly baseId: string
public readonly tableId: string
public readonly accessToken: string
public readonly limit: number
public readonly returnAll: boolean
constructor({ baseId, tableId, accessToken, limit = 100, returnAll = false }: AirtableLoaderParams) {
super()
this.baseId = baseId
this.tableId = tableId
this.accessToken = accessToken
this.limit = limit
this.returnAll = returnAll
}
public async load(): Promise<Document[]> {
if (this.returnAll) {
return this.loadAll()
}
return this.loadLimit()
}
protected async fetchAirtableData(url: string, params: ICommonObject): Promise<AirtableLoaderResponse> {
try {
const headers = {
Authorization: `Bearer ${this.accessToken}`,
'Content-Type': 'application/json',
Accept: 'application/json'
}
const response = await axios.get(url, { params, headers })
return response.data
} catch (error) {
throw new Error(`Failed to fetch ${url} from Airtable: ${error}`)
}
}
private createDocumentFromPage(page: AirtableLoaderPage): Document {
// Generate the URL
const pageUrl = `https://api.airtable.com/v0/${this.baseId}/${this.tableId}/${page.id}`
// Return a langchain document
return new Document({
pageContent: JSON.stringify(page.fields, null, 2),
metadata: {
url: pageUrl
}
})
}
private async loadLimit(): Promise<Document[]> {
const params = { maxRecords: this.limit }
const data = await this.fetchAirtableData(`https://api.airtable.com/v0/${this.baseId}/${this.tableId}`, params)
if (data.records.length === 0) {
return []
}
return data.records.map((page) => this.createDocumentFromPage(page))
}
private async loadAll(): Promise<Document[]> {
const params: ICommonObject = { pageSize: 100 }
let data: AirtableLoaderResponse
let returnPages: AirtableLoaderPage[] = []
do {
data = await this.fetchAirtableData(`https://api.airtable.com/v0/${this.baseId}/${this.tableId}`, params)
returnPages.push.apply(returnPages, data.records)
params.offset = data.offset
} while (data.offset !== undefined)
return returnPages.map((page) => this.createDocumentFromPage(page))
}
}
// Flowise node registration: exported via CommonJS under the `nodeClass` key.
module.exports = {
    nodeClass: Airtable_DocumentLoaders
}

View File

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="256px" height="215px" viewBox="0 0 256 215" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
<g>
<path d="M114.25873,2.70101695 L18.8604023,42.1756384 C13.5552723,44.3711638 13.6102328,51.9065311 18.9486282,54.0225085 L114.746142,92.0117514 C123.163769,95.3498757 132.537419,95.3498757 140.9536,92.0117514 L236.75256,54.0225085 C242.08951,51.9065311 242.145916,44.3711638 236.83934,42.1756384 L141.442459,2.70101695 C132.738459,-0.900338983 122.961284,-0.900338983 114.25873,2.70101695" fill="#FFBF00"></path>
<path d="M136.349071,112.756863 L136.349071,207.659101 C136.349071,212.173089 140.900664,215.263892 145.096461,213.600615 L251.844122,172.166219 C254.281184,171.200072 255.879376,168.845451 255.879376,166.224705 L255.879376,71.3224678 C255.879376,66.8084791 251.327783,63.7176768 247.131986,65.3809537 L140.384325,106.815349 C137.94871,107.781496 136.349071,110.136118 136.349071,112.756863" fill="#26B5F8"></path>
<path d="M111.422771,117.65355 L79.742409,132.949912 L76.5257763,134.504714 L9.65047684,166.548104 C5.4112904,168.593211 0.000578531073,165.503855 0.000578531073,160.794612 L0.000578531073,71.7210757 C0.000578531073,70.0173017 0.874160452,68.5463864 2.04568588,67.4384994 C2.53454463,66.9481944 3.08848814,66.5446689 3.66412655,66.2250305 C5.26231864,65.2661153 7.54173107,65.0101153 9.47981017,65.7766689 L110.890522,105.957098 C116.045234,108.002206 116.450206,115.225166 111.422771,117.65355" fill="#ED3049"></path>
<path d="M111.422771,117.65355 L79.742409,132.949912 L2.04568588,67.4384994 C2.53454463,66.9481944 3.08848814,66.5446689 3.66412655,66.2250305 C5.26231864,65.2661153 7.54173107,65.0101153 9.47981017,65.7766689 L110.890522,105.957098 C116.045234,108.002206 116.450206,115.225166 111.422771,117.65355" fill-opacity="0.25" fill="#000000"></path>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.9 KiB

View File

@ -39,8 +39,7 @@ class NotionDB_DocumentLoaders implements INode {
label: 'Notion Database Id',
name: 'databaseId',
type: 'string',
description:
'If your URL looks like - https://www.notion.so/<long_hash_1>?v=<long_hash_2>, then <long_hash_1> is the database ID'
description: 'If your URL looks like - https://www.notion.so/abcdefh?v=long_hash_2, then abcdefh is the database ID'
},
{
label: 'Metadata',

View File

Before

Width:  |  Height:  |  Size: 11 KiB

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View File

@ -32,6 +32,8 @@ class HuggingFaceInferenceEmbedding_Embeddings implements INode {
label: 'Model',
name: 'modelName',
type: 'string',
description: 'If using own inference endpoint, leave this blank',
placeholder: 'sentence-transformers/distilbert-base-nli-mean-tokens',
optional: true
},
{

View File

@ -30,18 +30,26 @@ export class HuggingFaceInferenceEmbeddings extends Embeddings implements Huggin
async _embed(texts: string[]): Promise<number[][]> {
// replace newlines, which can negatively affect performance.
const clean = texts.map((text) => text.replace(/\n/g, ' '))
const hf = new HfInference(this.apiKey)
const obj: any = {
inputs: clean
}
if (!this.endpoint) obj.model = this.model
return this.caller.call(() => this.client.featureExtraction(obj)) as Promise<number[][]>
if (this.endpoint) {
hf.endpoint(this.endpoint)
} else {
obj.model = this.model
}
const res = await this.caller.callWithOptions({}, hf.featureExtraction.bind(hf), obj)
return res as number[][]
}
embedQuery(document: string): Promise<number[]> {
return this._embed([document]).then((embeddings) => embeddings[0])
async embedQuery(document: string): Promise<number[]> {
const res = await this._embed([document])
return res[0]
}
embedDocuments(documents: string[]): Promise<number[][]> {
async embedDocuments(documents: string[]): Promise<number[][]> {
return this._embed(documents)
}
}

View File

@ -17,7 +17,7 @@ class OpenAIEmbedding_Embeddings implements INode {
this.label = 'OpenAI Embeddings'
this.name = 'openAIEmbeddings'
this.type = 'OpenAIEmbeddings'
this.icon = 'openai.svg'
this.icon = 'openai.png'
this.category = 'Embeddings'
this.description = 'OpenAI API to generate embeddings for a given text'
this.baseClasses = [this.type, ...getBaseClasses(OpenAIEmbeddings)]

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.9 KiB

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0,0,256,256" width="96px" height="96px" fill-rule="nonzero"><g fill="#00a67e" fill-rule="nonzero" stroke="none" stroke-width="1" stroke-linecap="butt" stroke-linejoin="miter" stroke-miterlimit="10" stroke-dasharray="" stroke-dashoffset="0" font-family="none" font-weight="none" font-size="none" text-anchor="none" style="mix-blend-mode: normal"><g transform="scale(10.66667,10.66667)"><path d="M11.13477,1.01758c-0.26304,-0.01259 -0.528,-0.00875 -0.79687,0.01563c-2.22436,0.20069 -4.00167,1.76087 -4.72852,3.78711c-1.71233,0.35652 -3.1721,1.48454 -3.9375,3.13672c-0.93789,2.02702 -0.47459,4.34373 0.91602,5.98633c-0.54763,1.66186 -0.30227,3.49111 0.74414,4.97852c1.28618,1.82589 3.52454,2.58282 5.64258,2.19922c1.16505,1.30652 2.87295,2.00936 4.6875,1.8457c2.22441,-0.2007 4.0017,-1.7608 4.72852,-3.78711c1.71235,-0.35654 3.17321,-1.4837 3.93945,-3.13672c0.93809,-2.0267 0.47529,-4.34583 -0.91602,-5.98828c0.54663,-1.66125 0.29983,-3.48985 -0.74609,-4.97656c-1.28618,-1.82589 -3.52454,-2.58282 -5.64258,-2.19922c-0.99242,-1.11291 -2.37852,-1.78893 -3.89062,-1.86133zM11.02539,2.51367c0.89653,0.03518 1.7296,0.36092 2.40625,0.9082c-0.11306,0.05604 -0.23154,0.09454 -0.3418,0.1582l-4.01367,2.31641c-0.306,0.176 -0.496,0.50247 -0.5,0.85547l-0.05859,5.48633l-1.76758,-1.04883v-4.4043c0,-2.136 1.55759,-4.04291 3.68359,-4.25391c0.19937,-0.01975 0.39689,-0.02523 0.5918,-0.01758zM16.125,4.25586c1.27358,0.00756 2.51484,0.5693 3.29297,1.6543c0.65289,0.90943 0.89227,1.99184 0.72852,3.03711c-0.10507,-0.06991 -0.19832,-0.15312 -0.30859,-0.2168l-4.01172,-2.31641c-0.306,-0.176 -0.68224,-0.17886 -0.99023,-0.00586l-4.7832,2.69531l0.02344,-2.05469l3.81445,-2.20117c0.69375,-0.4005 1.47022,-0.59633 2.23438,-0.5918zM5.2832,6.47266c-0.008,0.12587 -0.0332,0.24774 -0.0332,0.375v4.63281c0,0.353 0.18623,0.67938 0.49023,0.85938l4.72461,2.79688l-1.79102,1.00586l-3.81445,-2.20312c-1.85,-1.068 -2.7228,-3.37236 
-1.8418,-5.31836c0.46198,-1.02041 1.27879,-1.76751 2.26562,-2.14844zM15.32617,7.85742l3.81445,2.20313c1.85,1.068 2.72475,3.37236 1.84375,5.31836c-0.46209,1.02065 -1.28043,1.7676 -2.26758,2.14844c0.00797,-0.12565 0.0332,-0.24797 0.0332,-0.375v-4.63086c0,-0.354 -0.18623,-0.68133 -0.49023,-0.86133l-4.72461,-2.79687zM12.02539,9.71094l1.96875,1.16797l-0.02734,2.28906l-1.99219,1.11914l-1.96875,-1.16601l0.02539,-2.28906zM15.48242,11.76172l1.76758,1.04883v4.4043c0,2.136 -1.55759,4.04291 -3.68359,4.25391c-1.11644,0.11059 -2.17429,-0.22435 -2.99805,-0.89062c0.11306,-0.05604 0.23154,-0.09454 0.3418,-0.1582l4.01367,-2.31641c0.306,-0.176 0.496,-0.50247 0.5,-0.85547zM13.94727,14.89648l-0.02344,2.05469l-3.81445,2.20117c-1.85,1.068 -4.28234,0.6735 -5.52734,-1.0625c-0.65289,-0.90943 -0.89227,-1.99184 -0.72852,-3.03711c0.10521,0.07006 0.19816,0.15299 0.30859,0.2168l4.01172,2.31641c0.306,0.176 0.68223,0.17886 0.99023,0.00586z"></path></g></g></svg>

Before

Width:  |  Height:  |  Size: 2.9 KiB

View File

@ -17,7 +17,7 @@ class OpenAI_LLMs implements INode {
this.label = 'OpenAI'
this.name = 'openAI'
this.type = 'OpenAI'
this.icon = 'openai.svg'
this.icon = 'openai.png'
this.category = 'LLMs'
this.description = 'Wrapper around OpenAI large language models'
this.baseClasses = [this.type, ...getBaseClasses(OpenAI)]

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.9 KiB

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0,0,256,256" width="96px" height="96px" fill-rule="nonzero"><g fill="#00a67e" fill-rule="nonzero" stroke="none" stroke-width="1" stroke-linecap="butt" stroke-linejoin="miter" stroke-miterlimit="10" stroke-dasharray="" stroke-dashoffset="0" font-family="none" font-weight="none" font-size="none" text-anchor="none" style="mix-blend-mode: normal"><g transform="scale(10.66667,10.66667)"><path d="M11.13477,1.01758c-0.26304,-0.01259 -0.528,-0.00875 -0.79687,0.01563c-2.22436,0.20069 -4.00167,1.76087 -4.72852,3.78711c-1.71233,0.35652 -3.1721,1.48454 -3.9375,3.13672c-0.93789,2.02702 -0.47459,4.34373 0.91602,5.98633c-0.54763,1.66186 -0.30227,3.49111 0.74414,4.97852c1.28618,1.82589 3.52454,2.58282 5.64258,2.19922c1.16505,1.30652 2.87295,2.00936 4.6875,1.8457c2.22441,-0.2007 4.0017,-1.7608 4.72852,-3.78711c1.71235,-0.35654 3.17321,-1.4837 3.93945,-3.13672c0.93809,-2.0267 0.47529,-4.34583 -0.91602,-5.98828c0.54663,-1.66125 0.29983,-3.48985 -0.74609,-4.97656c-1.28618,-1.82589 -3.52454,-2.58282 -5.64258,-2.19922c-0.99242,-1.11291 -2.37852,-1.78893 -3.89062,-1.86133zM11.02539,2.51367c0.89653,0.03518 1.7296,0.36092 2.40625,0.9082c-0.11306,0.05604 -0.23154,0.09454 -0.3418,0.1582l-4.01367,2.31641c-0.306,0.176 -0.496,0.50247 -0.5,0.85547l-0.05859,5.48633l-1.76758,-1.04883v-4.4043c0,-2.136 1.55759,-4.04291 3.68359,-4.25391c0.19937,-0.01975 0.39689,-0.02523 0.5918,-0.01758zM16.125,4.25586c1.27358,0.00756 2.51484,0.5693 3.29297,1.6543c0.65289,0.90943 0.89227,1.99184 0.72852,3.03711c-0.10507,-0.06991 -0.19832,-0.15312 -0.30859,-0.2168l-4.01172,-2.31641c-0.306,-0.176 -0.68224,-0.17886 -0.99023,-0.00586l-4.7832,2.69531l0.02344,-2.05469l3.81445,-2.20117c0.69375,-0.4005 1.47022,-0.59633 2.23438,-0.5918zM5.2832,6.47266c-0.008,0.12587 -0.0332,0.24774 -0.0332,0.375v4.63281c0,0.353 0.18623,0.67938 0.49023,0.85938l4.72461,2.79688l-1.79102,1.00586l-3.81445,-2.20312c-1.85,-1.068 -2.7228,-3.37236 
-1.8418,-5.31836c0.46198,-1.02041 1.27879,-1.76751 2.26562,-2.14844zM15.32617,7.85742l3.81445,2.20313c1.85,1.068 2.72475,3.37236 1.84375,5.31836c-0.46209,1.02065 -1.28043,1.7676 -2.26758,2.14844c0.00797,-0.12565 0.0332,-0.24797 0.0332,-0.375v-4.63086c0,-0.354 -0.18623,-0.68133 -0.49023,-0.86133l-4.72461,-2.79687zM12.02539,9.71094l1.96875,1.16797l-0.02734,2.28906l-1.99219,1.11914l-1.96875,-1.16601l0.02539,-2.28906zM15.48242,11.76172l1.76758,1.04883v4.4043c0,2.136 -1.55759,4.04291 -3.68359,4.25391c-1.11644,0.11059 -2.17429,-0.22435 -2.99805,-0.89062c0.11306,-0.05604 0.23154,-0.09454 0.3418,-0.1582l4.01367,-2.31641c0.306,-0.176 0.496,-0.50247 0.5,-0.85547zM13.94727,14.89648l-0.02344,2.05469l-3.81445,2.20117c-1.85,1.068 -4.28234,0.6735 -5.52734,-1.0625c-0.65289,-0.90943 -0.89227,-1.99184 -0.72852,-3.03711c0.10521,0.07006 0.19816,0.15299 0.30859,0.2168l4.01172,2.31641c0.306,0.176 0.68223,0.17886 0.99023,0.00586z"></path></g></g></svg>

Before

Width:  |  Height:  |  Size: 2.9 KiB

View File

@ -0,0 +1,122 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { Replicate, ReplicateInput } from 'langchain/llms/replicate'
class Replicate_LLMs implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'Replicate'
        this.name = 'replicate'
        this.type = 'Replicate'
        this.icon = 'replicate.svg'
        this.category = 'LLMs'
        this.description = 'Use Replicate to run open source models on cloud'
        this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(Replicate)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['replicateApi']
        }
        this.inputs = [
            {
                label: 'Model',
                name: 'model',
                type: 'string',
                placeholder: 'a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5',
                optional: true
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                description:
                    'Adjusts randomness of outputs, greater than 1 is random and 0 is deterministic, 0.75 is a good starting value.',
                default: 0.7,
                optional: true
            },
            {
                label: 'Max Tokens',
                name: 'maxTokens',
                type: 'number',
                description: 'Maximum number of tokens to generate. A word is generally 2-3 tokens',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top Probability',
                name: 'topP',
                type: 'number',
                description:
                    'When decoding text, samples from the top p percentage of most likely tokens; lower to ignore less likely tokens',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Repetition Penalty',
                name: 'repetitionPenalty',
                type: 'number',
                description:
                    'Penalty for repeated words in generated text; 1 is no penalty, values greater than 1 discourage repetition, less than 1 encourage it. (minimum: 0.01; maximum: 5)',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Additional Inputs',
                name: 'additionalInputs',
                type: 'json',
                description:
                    'Each model has different parameters, refer to the specific model accepted inputs. For example: <a target="_blank" href="https://replicate.com/a16z-infra/llama13b-v2-chat/api#inputs">llama13b-v2</a>',
                additionalParams: true,
                optional: true
            }
        ]
    }

    /**
     * Builds a Replicate LLM from the node inputs and the connected credential.
     * @param nodeData node configuration; `inputs.model` must be `<org>/<name>:<version>`
     * @param options  runtime options used to resolve the credential
     * @returns a configured `Replicate` instance
     * @throws Error when the model identifier is missing
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const modelName = nodeData.inputs?.model as string
        const temperature = nodeData.inputs?.temperature as string
        const maxTokens = nodeData.inputs?.maxTokens as string
        const topP = nodeData.inputs?.topP as string
        const repetitionPenalty = nodeData.inputs?.repetitionPenalty as string
        const additionalInputs = nodeData.inputs?.additionalInputs as string

        // The Model field is marked optional in the UI, but without it we cannot
        // build a model identifier — fail fast with a clear message instead of a
        // TypeError from calling split() on undefined.
        if (!modelName) throw new Error('Replicate model is required, e.g. owner/model-name:versionhash')

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const apiKey = getCredentialParam('apiKey', credentialData, nodeData)

        // Expected identifier format: <org>/<name>:<version>
        const version = modelName.split(':').pop()
        const name = modelName.split(':')[0].split('/').pop()
        const org = modelName.split(':')[0].split('/')[0]

        const obj: ReplicateInput = {
            model: `${org}/${name}:${version}`,
            apiKey
        }

        // Map the UI inputs onto Replicate's prediction input payload.
        let inputs: any = {}
        if (maxTokens) inputs.max_length = parseInt(maxTokens, 10)
        if (temperature) inputs.temperature = parseFloat(temperature)
        if (topP) inputs.top_p = parseFloat(topP)
        if (repetitionPenalty) inputs.repetition_penalty = parseFloat(repetitionPenalty)
        if (additionalInputs) {
            // A json-typed input may arrive already parsed (object) or as a raw JSON string.
            const parsedInputs = typeof additionalInputs === 'object' ? additionalInputs : JSON.parse(additionalInputs)
            inputs = { ...inputs, ...parsedInputs }
        }
        if (Object.keys(inputs).length) obj.input = inputs

        return new Replicate(obj)
    }
}

module.exports = { nodeClass: Replicate_LLMs }

View File

@ -0,0 +1,7 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" class="logo" xml:space="preserve">
<g>
<polygon points="1000,427.6 1000,540.6 603.4,540.6 603.4,1000 477,1000 477,427.6 "></polygon>
<polygon points="1000,213.8 1000,327 364.8,327 364.8,1000 238.4,1000 238.4,213.8 "></polygon>
<polygon points="1000,0 1000,113.2 126.4,113.2 126.4,1000 0,1000 0,0 "></polygon>
</g>
</svg>

After

Width:  |  Height:  |  Size: 476 B

View File

@ -63,39 +63,49 @@ class DynamoDb_Memory implements INode {
}
]
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const tableName = nodeData.inputs?.tableName as string
const partitionKey = nodeData.inputs?.partitionKey as string
const sessionId = nodeData.inputs?.sessionId as string
const region = nodeData.inputs?.region as string
const memoryKey = nodeData.inputs?.memoryKey as string
return initalizeDynamoDB(nodeData, options)
}
const chatId = options.chatId
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const accessKey = getCredentialParam('accessKey', credentialData, nodeData)
const secretAccessKey = getCredentialParam('secretAccessKey', credentialData, nodeData)
const dynamoDb = new DynamoDBChatMessageHistory({
tableName,
partitionKey,
sessionId: sessionId ? sessionId : chatId,
config: {
region,
credentials: {
accessKeyId: accessKey,
secretAccessKey
}
}
})
const memory = new BufferMemory({
memoryKey,
chatHistory: dynamoDb,
returnMessages: true
})
return memory
async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
const dynamodbMemory = await initalizeDynamoDB(nodeData, options)
await dynamodbMemory.clear()
}
}
const initalizeDynamoDB = async (nodeData: INodeData, options: ICommonObject): Promise<BufferMemory> => {
const tableName = nodeData.inputs?.tableName as string
const partitionKey = nodeData.inputs?.partitionKey as string
const sessionId = nodeData.inputs?.sessionId as string
const region = nodeData.inputs?.region as string
const memoryKey = nodeData.inputs?.memoryKey as string
const chatId = options.chatId
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const accessKeyId = getCredentialParam('accessKey', credentialData, nodeData)
const secretAccessKey = getCredentialParam('secretAccessKey', credentialData, nodeData)
const dynamoDb = new DynamoDBChatMessageHistory({
tableName,
partitionKey,
sessionId: sessionId ? sessionId : chatId,
config: {
region,
credentials: {
accessKeyId,
secretAccessKey
}
}
})
const memory = new BufferMemory({
memoryKey,
chatHistory: dynamoDb,
returnMessages: true
})
return memory
}
module.exports = { nodeClass: DynamoDb_Memory }

View File

@ -58,37 +58,46 @@ class MotorMemory_Memory implements INode {
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const memoryKey = nodeData.inputs?.memoryKey as string
const baseURL = nodeData.inputs?.baseURL as string
const sessionId = nodeData.inputs?.sessionId as string
return initalizeMotorhead(nodeData, options)
}
const chatId = options?.chatId as string
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const clientId = getCredentialParam('clientId', credentialData, nodeData)
let obj: MotorheadMemoryInput = {
returnMessages: true,
sessionId: sessionId ? sessionId : chatId,
memoryKey
}
if (baseURL) {
obj = {
...obj,
url: baseURL
}
} else {
obj = {
...obj,
apiKey,
clientId
}
}
return new MotorheadMemory(obj)
async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
const motorhead = await initalizeMotorhead(nodeData, options)
await motorhead.clear()
}
}
const initalizeMotorhead = async (nodeData: INodeData, options: ICommonObject): Promise<MotorheadMemory> => {
const memoryKey = nodeData.inputs?.memoryKey as string
const baseURL = nodeData.inputs?.baseURL as string
const sessionId = nodeData.inputs?.sessionId as string
const chatId = options?.chatId as string
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const clientId = getCredentialParam('clientId', credentialData, nodeData)
let obj: MotorheadMemoryInput = {
returnMessages: true,
sessionId: sessionId ? sessionId : chatId,
memoryKey
}
if (baseURL) {
obj = {
...obj,
url: baseURL
}
} else {
obj = {
...obj,
apiKey,
clientId
}
}
return new MotorheadMemory(obj)
}
module.exports = { nodeClass: MotorMemory_Memory }

View File

@ -58,31 +58,40 @@ class RedisBackedChatMemory_Memory implements INode {
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const baseURL = nodeData.inputs?.baseURL as string
const sessionId = nodeData.inputs?.sessionId as string
const sessionTTL = nodeData.inputs?.sessionTTL as number
const memoryKey = nodeData.inputs?.memoryKey as string
return initalizeRedis(nodeData, options)
}
const chatId = options?.chatId as string
const redisClient = createClient({ url: baseURL })
let obj: RedisChatMessageHistoryInput = {
sessionId: sessionId ? sessionId : chatId,
client: redisClient
}
if (sessionTTL) {
obj = {
...obj,
sessionTTL
}
}
let redisChatMessageHistory = new RedisChatMessageHistory(obj)
let redis = new BufferMemory({ memoryKey, chatHistory: redisChatMessageHistory, returnMessages: true })
return redis
async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
const redis = initalizeRedis(nodeData, options)
redis.clear()
}
}
const initalizeRedis = (nodeData: INodeData, options: ICommonObject): BufferMemory => {
const baseURL = nodeData.inputs?.baseURL as string
const sessionId = nodeData.inputs?.sessionId as string
const sessionTTL = nodeData.inputs?.sessionTTL as number
const memoryKey = nodeData.inputs?.memoryKey as string
const chatId = options?.chatId as string
const redisClient = createClient({ url: baseURL })
let obj: RedisChatMessageHistoryInput = {
sessionId: sessionId ? sessionId : chatId,
client: redisClient
}
if (sessionTTL) {
obj = {
...obj,
sessionTTL
}
}
let redisChatMessageHistory = new RedisChatMessageHistory(obj)
let redis = new BufferMemory({ memoryKey, chatHistory: redisChatMessageHistory, returnMessages: true })
return redis
}
module.exports = { nodeClass: RedisBackedChatMemory_Memory }

View File

@ -107,33 +107,12 @@ class ZepMemory_Memory implements INode {
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const baseURL = nodeData.inputs?.baseURL as string
const aiPrefix = nodeData.inputs?.aiPrefix as string
const humanPrefix = nodeData.inputs?.humanPrefix as string
const memoryKey = nodeData.inputs?.memoryKey as string
const inputKey = nodeData.inputs?.inputKey as string
const autoSummaryTemplate = nodeData.inputs?.autoSummaryTemplate as string
const autoSummary = nodeData.inputs?.autoSummary as boolean
const sessionId = nodeData.inputs?.sessionId as string
const k = nodeData.inputs?.k as string
const chatId = options?.chatId as string
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const obj: ZepMemoryInput = {
baseURL,
sessionId: sessionId ? sessionId : chatId,
aiPrefix,
humanPrefix,
returnMessages: true,
memoryKey,
inputKey
}
if (apiKey) obj.apiKey = apiKey
let zep = new ZepMemory(obj)
let zep = await initalizeZep(nodeData, options)
// hack to support summary
let tmpFunc = zep.loadMemoryVariables
@ -158,6 +137,38 @@ class ZepMemory_Memory implements INode {
}
return zep
}
async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
const zep = await initalizeZep(nodeData, options)
await zep.clear()
}
}
const initalizeZep = async (nodeData: INodeData, options: ICommonObject): Promise<ZepMemory> => {
const baseURL = nodeData.inputs?.baseURL as string
const aiPrefix = nodeData.inputs?.aiPrefix as string
const humanPrefix = nodeData.inputs?.humanPrefix as string
const memoryKey = nodeData.inputs?.memoryKey as string
const inputKey = nodeData.inputs?.inputKey as string
const sessionId = nodeData.inputs?.sessionId as string
const chatId = options?.chatId as string
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const obj: ZepMemoryInput = {
baseURL,
sessionId: sessionId ? sessionId : chatId,
aiPrefix,
humanPrefix,
returnMessages: true,
memoryKey,
inputKey
}
if (apiKey) obj.apiKey = apiKey
return new ZepMemory(obj)
}
module.exports = { nodeClass: ZepMemory_Memory }

View File

@ -58,7 +58,7 @@ class ChatPromptTemplate_Prompts implements INode {
let promptValues: ICommonObject = {}
if (promptValuesStr) {
promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))
promptValues = JSON.parse(promptValuesStr)
}
// @ts-ignore
prompt.promptValues = promptValues

View File

@ -86,7 +86,7 @@ class FewShotPromptTemplate_Prompts implements INode {
const examplePrompt = nodeData.inputs?.examplePrompt as PromptTemplate
const inputVariables = getInputVariables(suffix)
const examples: Example[] = JSON.parse(examplesStr.replace(/\s/g, ''))
const examples: Example[] = JSON.parse(examplesStr)
try {
const obj: FewShotPromptTemplateInput = {

View File

@ -45,7 +45,7 @@ class PromptTemplate_Prompts implements INode {
let promptValues: ICommonObject = {}
if (promptValuesStr) {
promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))
promptValues = JSON.parse(promptValuesStr)
}
const inputVariables = getInputVariables(template)

View File

@ -0,0 +1,121 @@
import { VectorStore } from 'langchain/vectorstores/base'
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { HydeRetriever, HydeRetrieverOptions, PromptKey } from 'langchain/retrievers/hyde'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
class HydeRetriever_Retrievers implements INode {
label: string
name: string
description: string
type: string
icon: string
category: string
baseClasses: string[]
inputs: INodeParams[]
constructor() {
this.label = 'Hyde Retriever'
this.name = 'HydeRetriever'
this.type = 'HydeRetriever'
this.icon = 'hyderetriever.svg'
this.category = 'Retrievers'
this.description = 'Use HyDE retriever to retrieve from a vector store'
this.baseClasses = [this.type, 'BaseRetriever']
this.inputs = [
{
label: 'Language Model',
name: 'model',
type: 'BaseLanguageModel'
},
{
label: 'Vector Store',
name: 'vectorStore',
type: 'VectorStore'
},
{
label: 'Prompt Key',
name: 'promptKey',
type: 'options',
options: [
{
label: 'websearch',
name: 'websearch'
},
{
label: 'scifact',
name: 'scifact'
},
{
label: 'arguana',
name: 'arguana'
},
{
label: 'trec-covid',
name: 'trec-covid'
},
{
label: 'fiqa',
name: 'fiqa'
},
{
label: 'dbpedia-entity',
name: 'dbpedia-entity'
},
{
label: 'trec-news',
name: 'trec-news'
},
{
label: 'mr-tydi',
name: 'mr-tydi'
}
],
default: 'websearch'
},
{
label: 'Custom Prompt',
name: 'customPrompt',
description: 'If custom prompt is used, this will override Prompt Key',
placeholder: 'Please write a passage to answer the question\nQuestion: {question}\nPassage:',
type: 'string',
rows: 4,
additionalParams: true,
optional: true
},
{
label: 'Top K',
name: 'topK',
description: 'Number of top results to fetch. Default to 4',
placeholder: '4',
type: 'number',
default: 4,
additionalParams: true,
optional: true
}
]
}
async init(nodeData: INodeData): Promise<any> {
const llm = nodeData.inputs?.model as BaseLanguageModel
const vectorStore = nodeData.inputs?.vectorStore as VectorStore
const promptKey = nodeData.inputs?.promptKey as PromptKey
const customPrompt = nodeData.inputs?.customPrompt as string
const topK = nodeData.inputs?.topK as string
const k = topK ? parseInt(topK, 10) : 4
const obj: HydeRetrieverOptions<any> = {
llm,
vectorStore,
k
}
if (customPrompt) obj.promptTemplate = PromptTemplate.fromTemplate(customPrompt)
else if (promptKey) obj.promptTemplate = promptKey
const retriever = new HydeRetriever(obj)
return retriever
}
}
module.exports = { nodeClass: HydeRetriever_Retrievers }

View File

@ -0,0 +1,9 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-database-export" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M4 6c0 1.657 3.582 3 8 3s8 -1.343 8 -3s-3.582 -3 -8 -3s-8 1.343 -8 3"></path>
<path d="M4 6v6c0 1.657 3.582 3 8 3c1.118 0 2.183 -.086 3.15 -.241"></path>
<path d="M20 12v-6"></path>
<path d="M4 12v6c0 1.657 3.582 3 8 3c.157 0 .312 -.002 .466 -.005"></path>
<path d="M16 19h6"></path>
<path d="M19 16l3 3l-3 3"></path>
</svg>

After

Width:  |  Height:  |  Size: 647 B

View File

@ -0,0 +1,70 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { MarkdownTextSplitter, MarkdownTextSplitterParams } from 'langchain/text_splitter'
import { NodeHtmlMarkdown } from 'node-html-markdown'
class HtmlToMarkdownTextSplitter_TextSplitters implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'HtmlToMarkdown Text Splitter'
        this.name = 'htmlToMarkdownTextSplitter'
        this.type = 'HtmlToMarkdownTextSplitter'
        this.icon = 'htmlToMarkdownTextSplitter.svg'
        this.category = 'Text Splitters'
        this.description = `Converts Html to Markdown and then split your content into documents based on the Markdown headers`
        this.baseClasses = [this.type, ...getBaseClasses(HtmlToMarkdownTextSplitter)]
        this.inputs = [
            {
                label: 'Chunk Size',
                name: 'chunkSize',
                type: 'number',
                default: 1000,
                optional: true
            },
            {
                label: 'Chunk Overlap',
                name: 'chunkOverlap',
                type: 'number',
                optional: true
            }
        ]
    }

    /** Creates the splitter, forwarding only the options the user actually provided. */
    async init(nodeData: INodeData): Promise<any> {
        const rawChunkSize = nodeData.inputs?.chunkSize as string
        const rawChunkOverlap = nodeData.inputs?.chunkOverlap as string

        const params = {} as MarkdownTextSplitterParams
        if (rawChunkSize) params.chunkSize = parseInt(rawChunkSize, 10)
        if (rawChunkOverlap) params.chunkOverlap = parseInt(rawChunkOverlap, 10)

        return new HtmlToMarkdownTextSplitter(params)
    }
}
/**
 * MarkdownTextSplitter variant that first converts HTML input to Markdown
 * (via node-html-markdown) and then delegates to the Markdown splitting logic.
 */
class HtmlToMarkdownTextSplitter extends MarkdownTextSplitter implements MarkdownTextSplitterParams {
    constructor(fields?: Partial<MarkdownTextSplitterParams>) {
        // Original had a stray redundant block statement wrapping this call.
        super(fields)
    }

    /**
     * @param text raw HTML content
     * @returns chunks produced by splitting the HTML-converted Markdown
     */
    async splitText(text: string): Promise<string[]> {
        const markdown = NodeHtmlMarkdown.translate(text)
        // async/await replaces the original hand-rolled Promise wrapper around
        // super.splitText(), which already returns a Promise.
        return super.splitText(markdown)
    }
}

module.exports = { nodeClass: HtmlToMarkdownTextSplitter_TextSplitters }

View File

@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-markdown" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M3 5m0 2a2 2 0 0 1 2 -2h14a2 2 0 0 1 2 2v10a2 2 0 0 1 -2 2h-14a2 2 0 0 1 -2 -2z"></path>
<path d="M7 15v-6l2 2l2 -2v6"></path>
<path d="M14 13l2 2l2 -2m-2 2v-6"></path>
</svg>

After

Width:  |  Height:  |  Size: 482 B

View File

@ -0,0 +1,40 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BraveSearch } from 'langchain/tools'
class BraveSearchAPI_Tools implements INode {
label: string
name: string
description: string
type: string
icon: string
category: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]
constructor() {
this.label = 'BraveSearch API'
this.name = 'braveSearchAPI'
this.type = 'BraveSearchAPI'
this.icon = 'brave.svg'
this.category = 'Tools'
this.description = 'Wrapper around BraveSearch API - a real-time API to access Brave search results'
this.inputs = []
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['braveSearchApi']
}
this.baseClasses = [this.type, ...getBaseClasses(BraveSearch)]
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const braveApiKey = getCredentialParam('braveApiKey', credentialData, nodeData)
return new BraveSearch({ apiKey: braveApiKey })
}
}
module.exports = { nodeClass: BraveSearchAPI_Tools }

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 48 48" width="96px" height="96px"><path fill="#ff651f" d="M41,13l1,4l-4.09,16.35c-0.59,2.35-2.01,4.41-4.01,5.79l-8.19,5.68c-0.51,0.36-1.11,0.53-1.71,0.53 c-0.6,0-1.2-0.17-1.71-0.53l-8.19-5.68c-2-1.38-3.42-3.44-4.01-5.79L6,17l1-4l-1-2l3.25-3.25c1.05-1.05,2.6-1.44,4.02-0.99 c0.04,0.01,0.07,0.02,0.1,0.03L14,7l4-4h12l4,4l0.65-0.22c0.83-0.28,1.7-0.27,2.5,0c0.58,0.19,1.13,0.51,1.58,0.95 c0.01,0.01,0.01,0.01,0.02,0.02L42,11L41,13z"/><path fill="#f4592b" d="M38.73,7.73L33,11l-9,2l-9-3l-2.07-2.07c-0.56-0.56-1.41-0.74-2.15-0.44L8.67,8.33l0.58-0.58 c1.05-1.05,2.6-1.44,4.02-0.99c0.04,0.01,0.07,0.02,0.1,0.03L14,7l4-4h12l4,4l0.65-0.22c0.83-0.28,1.7-0.27,2.5,0 C37.73,6.97,38.28,7.29,38.73,7.73z"/><path fill="#fff" d="M32.51,23.49c-0.3,0.3-0.38,0.77-0.19,1.15l0.34,0.68c0.22,0.45,0.34,0.94,0.34,1.44 c0,0.8-0.29,1.57-0.83,2.16l-0.66,0.74c-0.32,0.21-0.72,0.23-1.04,0.05l-5.23-2.88c-0.59-0.4-0.6-1.27-0.01-1.66l3.91-2.66 c0.48-0.28,0.63-0.89,0.35-1.37l-1.9-3.16C27.28,17.46,27.45,17.24,28,17l6-3h-5l-3,0.75c-0.55,0.14-0.87,0.7-0.72,1.24l1.46,5.09 c0.14,0.51-0.14,1.05-0.65,1.22l-1.47,0.49c-0.21,0.07-0.41,0.11-0.62,0.11c-0.21,0-0.42-0.04-0.63-0.11l-1.46-0.49 c-0.51-0.17-0.79-0.71-0.65-1.22l1.46-5.09c0.15-0.54-0.17-1.1-0.72-1.24L19,14h-5l6,3c0.55,0.24,0.72,0.46,0.41,0.98l-1.9,3.16 c-0.28,0.48-0.13,1.09,0.35,1.37l3.91,2.66c0.59,0.39,0.58,1.26-0.01,1.66l-5.23,2.88c-0.32,0.18-0.72,0.16-1.04-0.05l-0.66-0.74 C15.29,28.33,15,27.56,15,26.76c0-0.5,0.12-0.99,0.34-1.44l0.34-0.68c0.19-0.38,0.11-0.85-0.19-1.15l-4.09-4.83 c-0.83-0.99-0.94-2.41-0.26-3.51l3.4-5.54c0.27-0.36,0.75-0.49,1.17-0.33l2.62,1.05c0.48,0.19,0.99,0.29,1.49,0.29 c0.61,0,1.23-0.14,1.79-0.42c0.75-0.38,1.57-0.57,2.39-0.57s1.64,0.19,2.39,0.57c1.03,0.51,2.22,0.56,3.28,0.13l2.62-1.05 c0.42-0.16,0.9-0.03,1.17,0.33l3.4,5.54c0.68,1.1,0.57,2.52-0.26,3.51L32.51,23.49z"/><path fill="#fff" d="M29.51,32.49l-4.8,3.8c-0.19,0.19-0.45,0.29-0.71,0.29s-0.52-0.1-0.71-0.29l-4.8-3.8 
c-0.24-0.24-0.17-0.65,0.13-0.8l4.93-2.47c0.14-0.07,0.29-0.1,0.45-0.1s0.31,0.03,0.45,0.1l4.93,2.47 C29.68,31.84,29.75,32.25,29.51,32.49z"/><path fill="#ed4d01" d="M41,13l1,4l-4.09,16.35c-0.59,2.35-2.01,4.41-4.01,5.79l-8.19,5.68c-0.51,0.36-1.11,0.53-1.71,0.53 V10.36L25,12h7v-2l5.15-3.22c0.59,0.19,1.15,0.52,1.6,0.97L42,11L41,13z"/><path fill="#f5f5f5" d="M32.51,23.49c-0.3,0.3-0.38,0.77-0.19,1.15l0.34,0.68c0.22,0.45,0.34,0.94,0.34,1.44 c0,0.8-0.29,1.57-0.83,2.16l-0.66,0.74c-0.32,0.21-0.72,0.23-1.04,0.05l-5.23-2.88c-0.59-0.4-0.6-1.27-0.01-1.66l3.91-2.66 c0.48-0.28,0.63-0.89,0.35-1.37l-1.9-3.16C27.28,17.46,27.45,17.24,28,17l6-3h-5l-3,0.75c-0.55,0.14-0.87,0.7-0.72,1.24l1.46,5.09 c0.14,0.51-0.14,1.05-0.65,1.22l-1.47,0.49c-0.21,0.07-0.41,0.11-0.62,0.11V9.63c0.82,0,1.64,0.19,2.39,0.57 c1.03,0.51,2.22,0.56,3.28,0.13l2.62-1.05c0.42-0.16,0.9-0.03,1.17,0.33l3.4,5.54c0.68,1.1,0.57,2.52-0.26,3.51L32.51,23.49z"/><path fill="#f5f5f5" d="M29.51,32.49l-4.8,3.8c-0.19,0.19-0.45,0.29-0.71,0.29v-7.46c0.16,0,0.31,0.03,0.45,0.1l4.93,2.47 C29.68,31.84,29.75,32.25,29.51,32.49z"/></svg>

After

Width:  |  Height:  |  Size: 3.0 KiB

View File

@ -2,7 +2,37 @@ import { z } from 'zod'
import { CallbackManagerForToolRun } from 'langchain/callbacks'
import { StructuredTool, ToolParams } from 'langchain/tools'
import { NodeVM } from 'vm2'
import { availableDependencies } from '../../../src/utils'
/*
* List of dependencies allowed to be import in vm2
*/
const availableDependencies = [
'@dqbd/tiktoken',
'@getzep/zep-js',
'@huggingface/inference',
'@pinecone-database/pinecone',
'@supabase/supabase-js',
'axios',
'cheerio',
'chromadb',
'cohere-ai',
'd3-dsv',
'form-data',
'graphql',
'html-to-text',
'langchain',
'linkifyjs',
'mammoth',
'moment',
'node-fetch',
'pdf-parse',
'pdfjs-dist',
'playwright',
'puppeteer',
'srt-parser-2',
'typeorm',
'weaviate-ts-client'
]
export interface BaseDynamicToolInput extends ToolParams {
name: string
@ -51,25 +81,37 @@ export class DynamicStructuredTool<
}
}
const defaultAllowBuiltInDep = [
'assert',
'buffer',
'crypto',
'events',
'http',
'https',
'net',
'path',
'querystring',
'timers',
'tls',
'url',
'zlib'
]
const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP
? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(','))
: defaultAllowBuiltInDep
const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : []
const deps = availableDependencies.concat(externalDeps)
const options = {
console: 'inherit',
sandbox,
require: {
external: false as boolean | { modules: string[] },
builtin: ['*']
external: { modules: deps },
builtin: builtinDeps
}
} as any
const external = JSON.stringify(availableDependencies)
if (external) {
const deps = JSON.parse(external)
if (deps && deps.length) {
options.require.external = {
modules: deps
}
}
}
const vm = new NodeVM(options)
const response = await vm.run(`module.exports = async function() {${this.code}}()`, __dirname)

View File

@ -0,0 +1,144 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { Embeddings } from 'langchain/embeddings/base'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { SingleStoreVectorStore, SingleStoreVectorStoreConfig } from 'langchain/vectorstores/singlestore'
class SingleStoreExisting_VectorStores implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams
    outputs: INodeOutputsValue[]

    constructor() {
        this.label = 'SingleStore Load Existing Table'
        this.name = 'singlestoreExisting'
        this.type = 'SingleStore'
        this.icon = 'singlestore.svg'
        this.category = 'Vector Stores'
        this.description = 'Load existing document from SingleStore'
        this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            description: 'Needed when using SingleStore cloud hosted',
            optional: true,
            credentialNames: ['singleStoreApi']
        }
        this.inputs = [
            {
                label: 'Embeddings',
                name: 'embeddings',
                type: 'Embeddings'
            },
            {
                label: 'Host',
                name: 'host',
                type: 'string'
            },
            {
                label: 'Database',
                name: 'database',
                type: 'string'
            },
            {
                label: 'Table Name',
                name: 'tableName',
                type: 'string',
                placeholder: 'embeddings',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Content Column Name',
                name: 'contentColumnName',
                type: 'string',
                placeholder: 'content',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Vector Column Name',
                name: 'vectorColumnName',
                type: 'string',
                placeholder: 'vector',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Metadata Column Name',
                name: 'metadataColumnName',
                type: 'string',
                placeholder: 'metadata',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Top K',
                name: 'topK',
                placeholder: '4',
                type: 'number',
                additionalParams: true,
                optional: true
            }
        ]
        this.outputs = [
            {
                label: 'SingleStore Retriever',
                name: 'retriever',
                baseClasses: this.baseClasses
            },
            {
                label: 'SingleStore Vector Store',
                name: 'vectorStore',
                baseClasses: [this.type, ...getBaseClasses(SingleStoreVectorStore)]
            }
        ]
    }

    /**
     * Connects to an existing SingleStore table and returns either a retriever
     * or the vector store itself, depending on the selected output anchor.
     * @param nodeData node configuration (connection details, column overrides, topK)
     * @param options  runtime options used to resolve the credential
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const user = getCredentialParam('user', credentialData, nodeData)
        const password = getCredentialParam('password', credentialData, nodeData)

        // Include the optional table/column overrides only when supplied, so the
        // langchain defaults apply otherwise.
        const singleStoreConnectionConfig = {
            connectionOptions: {
                host: nodeData.inputs?.host as string,
                port: 3306,
                user,
                password,
                database: nodeData.inputs?.database as string
            },
            ...(nodeData.inputs?.tableName ? { tableName: nodeData.inputs.tableName as string } : {}),
            ...(nodeData.inputs?.contentColumnName ? { contentColumnName: nodeData.inputs.contentColumnName as string } : {}),
            ...(nodeData.inputs?.vectorColumnName ? { vectorColumnName: nodeData.inputs.vectorColumnName as string } : {}),
            ...(nodeData.inputs?.metadataColumnName ? { metadataColumnName: nodeData.inputs.metadataColumnName as string } : {})
        } as SingleStoreVectorStoreConfig

        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const output = nodeData.outputs?.output as string
        const topK = nodeData.inputs?.topK as string
        const k = topK ? parseInt(topK, 10) : 4

        const vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig)

        if (output === 'retriever') {
            return vectorStore.asRetriever(k)
        } else if (output === 'vectorStore') {
            // SingleStoreVectorStore does not expose a `k` option; presumably
            // downstream consumers read `.k` off the store — attach it here.
            // NOTE(review): confirm which consumers rely on this property.
            ;(vectorStore as any).k = k
            return vectorStore
        }
        return vectorStore
    }
}

module.exports = { nodeClass: SingleStoreExisting_VectorStores }

View File

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="256px" height="256px" viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMidYMid">
<title>SingleStore</title>
<defs>
<linearGradient x1="67.3449258%" y1="-26.0044686%" x2="-18.5227789%" y2="22.9877555%" id="singleStoreLinearGradient-1">
<stop stop-color="#FF7BFF" offset="0%"></stop>
<stop stop-color="#AA00FF" offset="35.0158%"></stop>
<stop stop-color="#8800CC" offset="100%"></stop>
</linearGradient>
<linearGradient x1="36.2591509%" y1="-19.3628763%" x2="111.72205%" y2="44.9975357%" id="singleStoreLinearGradient-2">
<stop stop-color="#FF7BFF" offset="3.54358%"></stop>
<stop stop-color="#8800CC" offset="57.6537%"></stop>
<stop stop-color="#311B92" offset="100%"></stop>
</linearGradient>
</defs>
<g>
<path d="M133.793438,0 C161.220114,7.62806846 186.208847,26.8506986 196.569923,50.3452712 C212.416431,88.4856134 208.759637,136.695058 191.389506,165.376569 C176.761849,188.8709 154.211058,201.381085 128.308006,201.075829 C88.0823106,200.770814 55.4752171,168.732936 55.1704441,128.456768 C55.1704441,88.1803574 86.8634599,54.9221798 128.308006,54.9221798 C135.012288,54.9221798 144.679052,55.851955 155.649674,60.4286222 C155.649674,60.4286222 147.762766,55.757287 127.50695,52.6192355 C69.3015772,44.9912153 0.621898574,89.095884 16.4683968,190.701711 C38.409617,229.757339 80.4639504,256.303989 128.308006,255.997284 C198.703093,255.692994 256.299161,198.024717 255.996071,127.236226 C255.996071,59.498847 200.836263,1.83073691 133.793438,0 Z" fill="url(#singleStoreLinearGradient-1)"></path>
<path d="M181.635561,54.0037552 C171.884031,33.5605356 151.771183,17.3889203 127.087223,10.9813448 C121.601791,9.45574074 115.811828,8.84547014 109.412318,8.540359 C99.9653199,8.540359 90.8230945,9.76087603 81.376096,12.2018618 C57.9109865,19.2196838 41.455174,32.950265 31.7034025,43.6293966 C19.20906,57.9701518 10.9810571,72.9211776 6.1052197,87.8722034 C6.1052197,88.1774594 5.8004708,88.4824739 5.8004708,89.0927445 C5.4957219,90.3132857 4.27677462,93.9746678 4.27677462,94.8901944 C3.97202572,95.500465 3.97202572,96.4157502 3.66730098,97.0260207 C3.36255208,98.2465619 3.05780318,99.4668616 2.75307844,100.687403 C2.75307844,100.992659 2.75307844,101.297673 2.44832954,101.602688 C-5.47492441,140.963571 7.68750379,176.286091 15.6107577,189.406305 C17.5925312,192.688049 19.2199033,195.425935 20.8508738,197.938019 C2.87119611,100.298588 54.558966,53.6984992 113.373885,52.477958 C144.152582,51.8679289 174.931278,64.3778723 189.863709,89.3980006 C188.949389,75.6672745 188.035312,68.0392543 181.635561,54.0037552 Z" fill="url(#singleStoreLinearGradient-2)"></path>
</g>
</svg>

After

Width:  |  Height:  |  Size: 2.8 KiB

View File

@ -0,0 +1,160 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { Embeddings } from 'langchain/embeddings/base'
import { Document } from 'langchain/document'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { SingleStoreVectorStore, SingleStoreVectorStoreConfig } from 'langchain/vectorstores/singlestore'
import { flatten } from 'lodash'
class SingleStoreUpsert_VectorStores implements INode {
label: string
name: string
description: string
type: string
icon: string
category: string
baseClasses: string[]
inputs: INodeParams[]
credential: INodeParams
outputs: INodeOutputsValue[]
constructor() {
this.label = 'SingleStore Upsert Document'
this.name = 'singlestoreUpsert'
this.type = 'SingleStore'
this.icon = 'singlestore.svg'
this.category = 'Vector Stores'
this.description = 'Upsert documents to SingleStore'
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
description: 'Needed when using SingleStore cloud hosted',
optional: true,
credentialNames: ['singleStoreApi']
}
this.inputs = [
{
label: 'Document',
name: 'document',
type: 'Document',
list: true
},
{
label: 'Embeddings',
name: 'embeddings',
type: 'Embeddings'
},
{
label: 'Host',
name: 'host',
type: 'string'
},
{
label: 'Database',
name: 'database',
type: 'string'
},
{
label: 'Table Name',
name: 'tableName',
type: 'string',
placeholder: 'embeddings',
additionalParams: true,
optional: true
},
{
label: 'Content Column Name',
name: 'contentColumnName',
type: 'string',
placeholder: 'content',
additionalParams: true,
optional: true
},
{
label: 'Vector Column Name',
name: 'vectorColumnName',
type: 'string',
placeholder: 'vector',
additionalParams: true,
optional: true
},
{
label: 'Metadata Column Name',
name: 'metadataColumnName',
type: 'string',
placeholder: 'metadata',
additionalParams: true,
optional: true
},
{
label: 'Top K',
name: 'topK',
placeholder: '4',
type: 'number',
additionalParams: true,
optional: true
}
]
this.outputs = [
{
label: 'SingleStore Retriever',
name: 'retriever',
baseClasses: this.baseClasses
},
{
label: 'SingleStore Vector Store',
name: 'vectorStore',
baseClasses: [this.type, ...getBaseClasses(SingleStoreVectorStore)]
}
]
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const user = getCredentialParam('user', credentialData, nodeData)
const password = getCredentialParam('password', credentialData, nodeData)
const singleStoreConnectionConfig = {
connectionOptions: {
host: nodeData.inputs?.host as string,
port: 3306,
user,
password,
database: nodeData.inputs?.database as string
},
...(nodeData.inputs?.tableName ? { tableName: nodeData.inputs.tableName as string } : {}),
...(nodeData.inputs?.contentColumnName ? { contentColumnName: nodeData.inputs.contentColumnName as string } : {}),
...(nodeData.inputs?.vectorColumnName ? { vectorColumnName: nodeData.inputs.vectorColumnName as string } : {}),
...(nodeData.inputs?.metadataColumnName ? { metadataColumnName: nodeData.inputs.metadataColumnName as string } : {})
} as SingleStoreVectorStoreConfig
const docs = nodeData.inputs?.document as Document[]
const embeddings = nodeData.inputs?.embeddings as Embeddings
const output = nodeData.outputs?.output as string
const topK = nodeData.inputs?.topK as string
const k = topK ? parseInt(topK, 10) : 4
const flattenDocs = docs && docs.length ? flatten(docs) : []
const finalDocs = []
for (let i = 0; i < flattenDocs.length; i += 1) {
finalDocs.push(new Document(flattenDocs[i]))
}
let vectorStore: SingleStoreVectorStore
vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig)
vectorStore.addDocuments.bind(vectorStore)(finalDocs)
if (output === 'retriever') {
const retriever = vectorStore.asRetriever(k)
return retriever
} else if (output === 'vectorStore') {
;(vectorStore as any).k = k
return vectorStore
}
return vectorStore
}
}
module.exports = { nodeClass: SingleStoreUpsert_VectorStores }

View File

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="256px" height="256px" viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMidYMid">
<title>SingleStore</title>
<defs>
<linearGradient x1="67.3449258%" y1="-26.0044686%" x2="-18.5227789%" y2="22.9877555%" id="singleStoreLinearGradient-1">
<stop stop-color="#FF7BFF" offset="0%"></stop>
<stop stop-color="#AA00FF" offset="35.0158%"></stop>
<stop stop-color="#8800CC" offset="100%"></stop>
</linearGradient>
<linearGradient x1="36.2591509%" y1="-19.3628763%" x2="111.72205%" y2="44.9975357%" id="singleStoreLinearGradient-2">
<stop stop-color="#FF7BFF" offset="3.54358%"></stop>
<stop stop-color="#8800CC" offset="57.6537%"></stop>
<stop stop-color="#311B92" offset="100%"></stop>
</linearGradient>
</defs>
<g>
<path d="M133.793438,0 C161.220114,7.62806846 186.208847,26.8506986 196.569923,50.3452712 C212.416431,88.4856134 208.759637,136.695058 191.389506,165.376569 C176.761849,188.8709 154.211058,201.381085 128.308006,201.075829 C88.0823106,200.770814 55.4752171,168.732936 55.1704441,128.456768 C55.1704441,88.1803574 86.8634599,54.9221798 128.308006,54.9221798 C135.012288,54.9221798 144.679052,55.851955 155.649674,60.4286222 C155.649674,60.4286222 147.762766,55.757287 127.50695,52.6192355 C69.3015772,44.9912153 0.621898574,89.095884 16.4683968,190.701711 C38.409617,229.757339 80.4639504,256.303989 128.308006,255.997284 C198.703093,255.692994 256.299161,198.024717 255.996071,127.236226 C255.996071,59.498847 200.836263,1.83073691 133.793438,0 Z" fill="url(#singleStoreLinearGradient-1)"></path>
<path d="M181.635561,54.0037552 C171.884031,33.5605356 151.771183,17.3889203 127.087223,10.9813448 C121.601791,9.45574074 115.811828,8.84547014 109.412318,8.540359 C99.9653199,8.540359 90.8230945,9.76087603 81.376096,12.2018618 C57.9109865,19.2196838 41.455174,32.950265 31.7034025,43.6293966 C19.20906,57.9701518 10.9810571,72.9211776 6.1052197,87.8722034 C6.1052197,88.1774594 5.8004708,88.4824739 5.8004708,89.0927445 C5.4957219,90.3132857 4.27677462,93.9746678 4.27677462,94.8901944 C3.97202572,95.500465 3.97202572,96.4157502 3.66730098,97.0260207 C3.36255208,98.2465619 3.05780318,99.4668616 2.75307844,100.687403 C2.75307844,100.992659 2.75307844,101.297673 2.44832954,101.602688 C-5.47492441,140.963571 7.68750379,176.286091 15.6107577,189.406305 C17.5925312,192.688049 19.2199033,195.425935 20.8508738,197.938019 C2.87119611,100.298588 54.558966,53.6984992 113.373885,52.477958 C144.152582,51.8679289 174.931278,64.3778723 189.863709,89.3980006 C188.949389,75.6672745 188.035312,68.0392543 181.635561,54.0037552 Z" fill="url(#singleStoreLinearGradient-2)"></path>
</g>
</svg>

After

Width:  |  Height:  |  Size: 2.8 KiB

View File

@ -1,6 +1,6 @@
{
"name": "flowise-components",
"version": "1.2.16",
"version": "1.2.17",
"description": "Flowiseai Components",
"main": "dist/src/index",
"types": "dist/src/index.d.ts",
@ -18,13 +18,13 @@
"dependencies": {
"@aws-sdk/client-dynamodb": "^3.360.0",
"@dqbd/tiktoken": "^1.0.7",
"@getzep/zep-js": "^0.3.1",
"@getzep/zep-js": "^0.4.1",
"@huggingface/inference": "^2.6.1",
"@notionhq/client": "^2.2.7",
"@notionhq/client": "^2.2.8",
"@opensearch-project/opensearch": "^1.2.0",
"@pinecone-database/pinecone": "^0.0.12",
"@pinecone-database/pinecone": "^0.0.14",
"@qdrant/js-client-rest": "^1.2.2",
"@supabase/supabase-js": "^2.21.0",
"@supabase/supabase-js": "^2.29.0",
"@types/js-yaml": "^4.0.5",
"@types/jsdom": "^21.1.1",
"axios": "^0.27.2",
@ -38,17 +38,21 @@
"form-data": "^4.0.0",
"graphql": "^16.6.0",
"html-to-text": "^9.0.5",
"langchain": "^0.0.110",
"langchain": "^0.0.117",
"linkifyjs": "^4.1.1",
"mammoth": "^1.5.1",
"moment": "^2.29.3",
"mysql2": "^3.5.1",
"node-fetch": "^2.6.11",
"node-html-markdown": "^1.3.0",
"notion-to-md": "^3.1.1",
"pdf-parse": "^1.1.1",
"pdfjs-dist": "^3.7.107",
"playwright": "^1.35.0",
"puppeteer": "^20.7.1",
"pyodide": ">=0.21.0-alpha.2",
"redis": "^4.6.7",
"replicate": "^0.12.3",
"srt-parser-2": "^1.2.3",
"vm2": "^3.9.19",
"weaviate-ts-client": "^1.1.0",

View File

@ -98,6 +98,7 @@ export interface INode extends INodeProperties {
}
init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<any>
run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<string | ICommonObject>
clearSessionMemory?(nodeData: INodeData, options?: ICommonObject): Promise<void>
}
export interface INodeData extends INodeProperties {

View File

@ -323,7 +323,7 @@ export async function xmlScrape(currentURL: string, limit: number): Promise<stri
}
const contentType: string | null = resp.headers.get('content-type')
if ((contentType && !contentType.includes('application/xml')) || !contentType) {
if ((contentType && !contentType.includes('application/xml') && !contentType.includes('text/xml')) || !contentType) {
if (process.env.DEBUG === 'true') console.error(`non xml response, content type: ${contentType}, on page: ${currentURL}`)
return urls
}
@ -339,7 +339,6 @@ export async function xmlScrape(currentURL: string, limit: number): Promise<stri
/*
 * Get environment variable value by name
 * @param {string} name
 * @returns {string | undefined}
 */
export const getEnvironmentVariable = (name: string): string | undefined => {
@ -418,7 +417,7 @@ export const getCredentialData = async (selectedCredentialId: string, options: I
id: selectedCredentialId
})
if (!credential) throw new Error(`Credential ${selectedCredentialId} not found`)
if (!credential) return {}
// Decrpyt credentialData
const decryptedCredentialData = await decryptCredentialData(credential.encryptedData)
@ -430,36 +429,52 @@ export const getCredentialData = async (selectedCredentialId: string, options: I
}
export const getCredentialParam = (paramName: string, credentialData: ICommonObject, nodeData: INodeData): any => {
return (nodeData.inputs as ICommonObject)[paramName] ?? credentialData[paramName]
return (nodeData.inputs as ICommonObject)[paramName] ?? credentialData[paramName] ?? undefined
}
/*
* List of dependencies allowed to be import in vm2
*/
export const availableDependencies = [
'@dqbd/tiktoken',
'@getzep/zep-js',
'@huggingface/inference',
'@pinecone-database/pinecone',
'@supabase/supabase-js',
'axios',
'cheerio',
'chromadb',
'cohere-ai',
'd3-dsv',
'form-data',
'graphql',
'html-to-text',
'langchain',
'linkifyjs',
'mammoth',
'moment',
'node-fetch',
'pdf-parse',
'pdfjs-dist',
'playwright',
'puppeteer',
'srt-parser-2',
'typeorm',
'weaviate-ts-client'
// reference https://www.freeformatter.com/json-escape.html
const jsonEscapeCharacters = [
{ escape: '"', value: 'FLOWISE_DOUBLE_QUOTE' },
{ escape: '\n', value: 'FLOWISE_NEWLINE' },
{ escape: '\b', value: 'FLOWISE_BACKSPACE' },
{ escape: '\f', value: 'FLOWISE_FORM_FEED' },
{ escape: '\r', value: 'FLOWISE_CARRIAGE_RETURN' },
{ escape: '\t', value: 'FLOWISE_TAB' },
{ escape: '\\', value: 'FLOWISE_BACKSLASH' }
]
function handleEscapesJSONParse(input: string, reverse: Boolean): string {
for (const element of jsonEscapeCharacters) {
input = reverse ? input.replaceAll(element.value, element.escape) : input.replaceAll(element.escape, element.value)
}
return input
}
function iterateEscapesJSONParse(input: any, reverse: Boolean): any {
for (const element in input) {
const type = typeof input[element]
if (type === 'string') input[element] = handleEscapesJSONParse(input[element], reverse)
else if (type === 'object') input[element] = iterateEscapesJSONParse(input[element], reverse)
}
return input
}
export function handleEscapeCharacters(input: any, reverse: Boolean): any {
const type = typeof input
if (type === 'string') return handleEscapesJSONParse(input, reverse)
else if (type === 'object') return iterateEscapesJSONParse(input, reverse)
return input
}
export const getUserHome = (): string => {
let variableName = 'HOME'
if (process.platform === 'win32') {
variableName = 'USERPROFILE'
}
if (process.env[variableName] === undefined) {
// If for some reason the variable does not exist, fall back to current folder
return process.cwd()
}
return process.env[variableName] as string
}

View File

@ -1,6 +1,6 @@
{
"compilerOptions": {
"lib": ["ES2020"],
"lib": ["ES2020", "ES2021.String"],
"experimentalDecorators": true /* Enable experimental support for TC39 stage 2 draft decorators. */,
"emitDecoratorMetadata": true /* Emit design-type metadata for decorated declarations in source files. */,
"target": "ES2020", // or higher

View File

@ -1,11 +1,22 @@
PORT=3000
PASSPHRASE=MYPASSPHRASE # Passphrase used to create encryption key
# DATABASE_PATH=/your_database_path/.flowise
# APIKEY_PATH=/your_api_key_path/.flowise
# SECRETKEY_PATH=/your_api_key_path/.flowise
# LOG_PATH=/your_log_path/.flowise/logs
# DATABASE_TYPE=postgres
# DATABASE_PORT=""
# DATABASE_HOST=""
# DATABASE_NAME="flowise"
# DATABASE_USER=""
# DATABASE_PASSWORD=""
# OVERRIDE_DATABASE=true
# FLOWISE_USERNAME=user
# FLOWISE_PASSWORD=1234
# DEBUG=true
# DATABASE_PATH=/your/database/path/.flowise
# APIKEY_PATH=/your/api/key/path
# SECRETKEY_PATH=/your/secret/key/path
# LOG_PATH=/your/log/path
# LOG_LEVEL=debug (error | warn | info | verbose | debug)
# EXECUTION_MODE=main (child | main)
# TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
# TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash

View File

@ -31,21 +31,7 @@ FLOWISE_PASSWORD=1234
## 🌱 Env Variables
Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder.
| Variable | Description | Type | Default |
| ---------------- | ---------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| PORT | The HTTP port Flowise runs on | Number | 3000 |
| PASSPHRASE | Passphrase used to create encryption key | String | `MYPASSPHRASE` |
| FLOWISE_USERNAME | Username to login | String |
| FLOWISE_PASSWORD | Password to login | String |
| DEBUG | Print logs from components | Boolean |
| LOG_PATH | Location where log files are stored | String | `your/path/Flowise/logs` |
| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` |
| DATABASE_PATH | Location where database is saved | String | `your/home/dir/.flowise` |
| APIKEY_PATH | Location where api keys are saved | String | `your/path/Flowise/packages/server` |
| SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your/path/Flowise/packages/server` |
| EXECUTION_MODE | Whether predictions run in their own process or the main process | Enum String: `child`, `main` | `main` |
Flowise supports different environment variables to configure your instance. You can specify the following variables in the `.env` file inside the `packages/server` folder. Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)
You can also specify the env variables when using `npx`. For example:

View File

@ -1,5 +1,5 @@
{
"description": "Use OpenAI Function Agent to automatically decide which API to call, generating url and body request from conversation",
"description": "Use OpenAI Function Agent and Chain to automatically decide which API to call, generating url and body request from conversation",
"nodes": [
{
"width": 300,

View File

@ -0,0 +1,226 @@
{
"description": "Analyse and summarize CSV data",
"nodes": [
{
"width": 300,
"height": 377,
"id": "csvAgent_0",
"position": {
"x": 1064.0780498701288,
"y": 284.44352695304724
},
"type": "customNode",
"data": {
"id": "csvAgent_0",
"label": "CSV Agent",
"name": "csvAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor", "BaseChain"],
"category": "Agents",
"description": "Agent used to answer queries on CSV data",
"inputParams": [
{
"label": "Csv File",
"name": "csvFile",
"type": "file",
"fileType": ".csv",
"id": "csvAgent_0-input-csvFile-file"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "csvAgent_0-input-model-BaseLanguageModel"
}
],
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}"
},
"outputAnchors": [
{
"id": "csvAgent_0-output-csvAgent-AgentExecutor|BaseChain",
"name": "csvAgent",
"label": "AgentExecutor",
"type": "AgentExecutor | BaseChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1064.0780498701288,
"y": 284.44352695304724
},
"dragging": false
},
{
"width": 300,
"height": 522,
"id": "chatOpenAI_0",
"position": {
"x": 657.3762197414501,
"y": 220.2950766042332
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": ["openAIApi"],
"id": "chatOpenAI_0-input-credential-credential"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-4-32k",
"name": "gpt-4-32k"
},
{
"label": "gpt-4-32k-0613",
"name": "gpt-4-32k-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
},
{
"label": "gpt-3.5-turbo-16k",
"name": "gpt-3.5-turbo-16k"
},
{
"label": "gpt-3.5-turbo-16k-0613",
"name": "gpt-3.5-turbo-16k-0613"
}
],
"default": "gpt-3.5-turbo",
"optional": true,
"id": "chatOpenAI_0-input-modelName-options"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true,
"id": "chatOpenAI_0-input-temperature-number"
},
{
"label": "Max Tokens",
"name": "maxTokens",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-maxTokens-number"
},
{
"label": "Top Probability",
"name": "topP",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-topP-number"
},
{
"label": "Frequency Penalty",
"name": "frequencyPenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-frequencyPenalty-number"
},
{
"label": "Presence Penalty",
"name": "presencePenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-presencePenalty-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-basepath-string"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": 0.9,
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
"presencePenalty": "",
"timeout": "",
"basepath": ""
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 657.3762197414501,
"y": 220.2950766042332
},
"dragging": false
}
],
"edges": [
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "csvAgent_0",
"targetHandle": "csvAgent_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-csvAgent_0-csvAgent_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
}
]
}

View File

@ -3,132 +3,11 @@
"nodes": [
{
"width": 300,
"height": 376,
"id": "recursiveCharacterTextSplitter_1",
"position": {
"x": 422.81091375202413,
"y": 122.99825010325736
},
"type": "customNode",
"data": {
"id": "recursiveCharacterTextSplitter_1",
"label": "Recursive Character Text Splitter",
"name": "recursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter",
"baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"],
"category": "Text Splitters",
"description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"",
"inputParams": [
{
"label": "Chunk Size",
"name": "chunkSize",
"type": "number",
"default": 1000,
"optional": true,
"id": "recursiveCharacterTextSplitter_1-input-chunkSize-number"
},
{
"label": "Chunk Overlap",
"name": "chunkOverlap",
"type": "number",
"optional": true,
"id": "recursiveCharacterTextSplitter_1-input-chunkOverlap-number"
}
],
"inputAnchors": [],
"inputs": {
"chunkSize": 1000,
"chunkOverlap": ""
},
"outputAnchors": [
{
"id": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"name": "recursiveCharacterTextSplitter",
"label": "RecursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter | TextSplitter"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 422.81091375202413,
"y": 122.99825010325736
},
"dragging": false
},
{
"width": 300,
"height": 392,
"id": "textFile_1",
"position": {
"x": 788.5395420616719,
"y": 126.30459801017741
},
"type": "customNode",
"data": {
"id": "textFile_1",
"label": "Text File",
"name": "textFile",
"type": "Document",
"baseClasses": ["Document"],
"category": "Document Loaders",
"description": "Load data from text files",
"inputParams": [
{
"label": "Txt File",
"name": "txtFile",
"type": "file",
"fileType": ".txt",
"id": "textFile_1-input-txtFile-file"
},
{
"label": "Metadata",
"name": "metadata",
"type": "json",
"optional": true,
"additionalParams": true,
"id": "textFile_1-input-metadata-json"
}
],
"inputAnchors": [
{
"label": "Text Splitter",
"name": "textSplitter",
"type": "TextSplitter",
"optional": true,
"id": "textFile_1-input-textSplitter-TextSplitter"
}
],
"inputs": {
"textSplitter": "{{recursiveCharacterTextSplitter_1.data.instance}}"
},
"outputAnchors": [
{
"id": "textFile_1-output-textFile-Document",
"name": "textFile",
"label": "Document",
"type": "Document"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 788.5395420616719,
"y": 126.30459801017741
},
"dragging": false
},
{
"width": 300,
"height": 523,
"height": 522,
"id": "chatOpenAI_0",
"position": {
"x": 1200.8283780918746,
"y": 12.591428916196605
"x": 1184.1176114500388,
"y": -44.15535835370571
},
"type": "customNode",
"data": {
@ -249,7 +128,7 @@
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": 0.9,
"temperature": "0",
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
@ -268,129 +147,20 @@
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1200.8283780918746,
"y": 12.591428916196605
"x": 1184.1176114500388,
"y": -44.15535835370571
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 480,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 1626.7402076624098,
"y": 394.29095783927113
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["ConversationalRetrievalQAChain", "BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [
{
"label": "Return Source Documents",
"name": "returnSourceDocuments",
"type": "boolean",
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
},
{
"label": "System Message",
"name": "systemMessagePrompt",
"type": "string",
"rows": 4,
"additionalParams": true,
"optional": true,
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
},
{
"label": "Chain Option",
"name": "chainOption",
"type": "options",
"options": [
{
"label": "MapReduceDocumentsChain",
"name": "map_reduce",
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
},
{
"label": "RefineDocumentsChain",
"name": "refine",
"description": "Suitable for QA tasks over a large number of documents."
},
{
"label": "StuffDocumentsChain",
"name": "stuff",
"description": "Suitable for QA tasks over a small number of documents."
}
],
"additionalParams": true,
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
},
{
"label": "Memory",
"name": "memory",
"type": "DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory",
"optional": true,
"description": "If no memory connected, default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory"
}
],
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}",
"memory": "",
"returnSourceDocuments": "",
"systemMessagePrompt": "",
"chainOption": ""
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain | BaseChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1626.7402076624098,
"y": 394.29095783927113
},
"dragging": false
},
{
"width": 300,
"height": 329,
"height": 328,
"id": "openAIEmbeddings_0",
"position": {
"x": 788.1802387155774,
"y": 579.9826205586049
"x": 795.6162477805387,
"y": 603.260214150876
},
"type": "customNode",
"data": {
@ -462,18 +232,18 @@
},
"selected": false,
"positionAbsolute": {
"x": 788.1802387155774,
"y": 579.9826205586049
"x": 795.6162477805387,
"y": 603.260214150876
},
"dragging": false
},
{
"width": 300,
"height": 555,
"height": 554,
"id": "pineconeUpsert_0",
"position": {
"x": 1200.8283780918746,
"y": 588.825080688097
"x": 1191.1792786926865,
"y": 514.2126330994578
},
"type": "customNode",
"data": {
@ -534,7 +304,7 @@
}
],
"inputs": {
"document": ["{{textFile_1.data.instance}}"],
"document": ["{{textFile_0.data.instance}}"],
"embeddings": "{{openAIEmbeddings_0.data.instance}}",
"pineconeIndex": "",
"pineconeNamespace": "",
@ -569,20 +339,272 @@
},
"selected": false,
"positionAbsolute": {
"x": 1200.8283780918746,
"y": 588.825080688097
"x": 1191.1792786926865,
"y": 514.2126330994578
},
"dragging": false
},
{
"width": 300,
"height": 376,
"id": "recursiveCharacterTextSplitter_0",
"position": {
"x": 406.08456707531263,
"y": 197.66460328693972
},
"type": "customNode",
"data": {
"id": "recursiveCharacterTextSplitter_0",
"label": "Recursive Character Text Splitter",
"name": "recursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter",
"baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"],
"category": "Text Splitters",
"description": "Split documents recursively by different characters - starting with \"\\n\\n\", then \"\\n\", then \" \"",
"inputParams": [
{
"label": "Chunk Size",
"name": "chunkSize",
"type": "number",
"default": 1000,
"optional": true,
"id": "recursiveCharacterTextSplitter_0-input-chunkSize-number"
},
{
"label": "Chunk Overlap",
"name": "chunkOverlap",
"type": "number",
"optional": true,
"id": "recursiveCharacterTextSplitter_0-input-chunkOverlap-number"
}
],
"inputAnchors": [],
"inputs": {
"chunkSize": 1000,
"chunkOverlap": ""
},
"outputAnchors": [
{
"id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"name": "recursiveCharacterTextSplitter",
"label": "RecursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter | TextSplitter"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 406.08456707531263,
"y": 197.66460328693972
},
"dragging": false
},
{
"width": 300,
"height": 410,
"id": "textFile_0",
"position": {
"x": 786.5497697231324,
"y": 140.09563157584407
},
"type": "customNode",
"data": {
"id": "textFile_0",
"label": "Text File",
"name": "textFile",
"type": "Document",
"baseClasses": ["Document"],
"category": "Document Loaders",
"description": "Load data from text files",
"inputParams": [
{
"label": "Txt File",
"name": "txtFile",
"type": "file",
"fileType": ".txt",
"id": "textFile_0-input-txtFile-file"
},
{
"label": "Metadata",
"name": "metadata",
"type": "json",
"optional": true,
"additionalParams": true,
"id": "textFile_0-input-metadata-json"
}
],
"inputAnchors": [
{
"label": "Text Splitter",
"name": "textSplitter",
"type": "TextSplitter",
"optional": true,
"id": "textFile_0-input-textSplitter-TextSplitter"
}
],
"inputs": {
"textSplitter": "{{recursiveCharacterTextSplitter_0.data.instance}}",
"metadata": ""
},
"outputAnchors": [
{
"id": "textFile_0-output-textFile-Document",
"name": "textFile",
"label": "Document",
"type": "Document"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 786.5497697231324,
"y": 140.09563157584407
},
"dragging": false
},
{
"width": 300,
"height": 479,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 1558.6564094656787,
"y": 386.60217819991124
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["ConversationalRetrievalQAChain", "BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [
{
"label": "Return Source Documents",
"name": "returnSourceDocuments",
"type": "boolean",
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
},
{
"label": "System Message",
"name": "systemMessagePrompt",
"type": "string",
"rows": 4,
"additionalParams": true,
"optional": true,
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
},
{
"label": "Chain Option",
"name": "chainOption",
"type": "options",
"options": [
{
"label": "MapReduceDocumentsChain",
"name": "map_reduce",
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
},
{
"label": "RefineDocumentsChain",
"name": "refine",
"description": "Suitable for QA tasks over a large number of documents."
},
{
"label": "StuffDocumentsChain",
"name": "stuff",
"description": "Suitable for QA tasks over a small number of documents."
}
],
"additionalParams": true,
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseMemory",
"optional": true,
"description": "If left empty, a default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory"
}
],
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}",
"memory": "",
"returnSourceDocuments": "",
"systemMessagePrompt": "",
"chainOption": ""
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain | BaseChain"
}
],
"outputs": {},
"selected": false
},
"positionAbsolute": {
"x": 1558.6564094656787,
"y": 386.60217819991124
},
"selected": false
}
],
"edges": [
{
"source": "recursiveCharacterTextSplitter_1",
"sourceHandle": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"target": "textFile_1",
"targetHandle": "textFile_1-input-textSplitter-TextSplitter",
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_1-recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-textFile_1-textFile_1-input-textSplitter-TextSplitter",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
},
{
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"target": "textFile_0",
"targetHandle": "textFile_0-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-textFile_0-textFile_0-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
},
{
"source": "textFile_0",
"sourceHandle": "textFile_0-output-textFile-Document",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-document-Document",
"type": "buttonedge",
"id": "textFile_0-textFile_0-output-textFile-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document",
"data": {
"label": ""
}
@ -598,28 +620,6 @@
"label": ""
}
},
{
"source": "textFile_1",
"sourceHandle": "textFile_1-output-textFile-Document",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-document-Document",
"type": "buttonedge",
"id": "textFile_1-textFile_1-output-textFile-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document",
"data": {
"label": ""
}
},
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
},
{
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever",

View File

@ -0,0 +1,637 @@
{
"description": "Flowise Docs Github QnA using conversational retrieval QA chain",
"nodes": [
{
"width": 300,
"height": 376,
"id": "markdownTextSplitter_0",
"position": {
"x": 1081.1540334344143,
"y": -113.73571627207801
},
"type": "customNode",
"data": {
"id": "markdownTextSplitter_0",
"label": "Markdown Text Splitter",
"name": "markdownTextSplitter",
"type": "MarkdownTextSplitter",
"baseClasses": ["MarkdownTextSplitter", "RecursiveCharacterTextSplitter", "TextSplitter", "BaseDocumentTransformer"],
"category": "Text Splitters",
"description": "Split your content into documents based on the Markdown headers",
"inputParams": [
{
"label": "Chunk Size",
"name": "chunkSize",
"type": "number",
"default": 1000,
"optional": true,
"id": "markdownTextSplitter_0-input-chunkSize-number"
},
{
"label": "Chunk Overlap",
"name": "chunkOverlap",
"type": "number",
"optional": true,
"id": "markdownTextSplitter_0-input-chunkOverlap-number"
}
],
"inputAnchors": [],
"inputs": {
"chunkSize": "4000",
"chunkOverlap": ""
},
"outputAnchors": [
{
"id": "markdownTextSplitter_0-output-markdownTextSplitter-MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer",
"name": "markdownTextSplitter",
"label": "MarkdownTextSplitter",
"type": "MarkdownTextSplitter | RecursiveCharacterTextSplitter | TextSplitter | BaseDocumentTransformer"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1081.1540334344143,
"y": -113.73571627207801
},
"dragging": false
},
{
"width": 300,
"height": 405,
"id": "memoryVectorStore_0",
"position": {
"x": 1844.88052464165,
"y": 484.60473328470243
},
"type": "customNode",
"data": {
"id": "memoryVectorStore_0",
"label": "In-Memory Vector Store",
"name": "memoryVectorStore",
"type": "Memory",
"baseClasses": ["Memory", "VectorStoreRetriever", "BaseRetriever"],
"category": "Vector Stores",
"description": "In-memory vectorstore that stores embeddings and does an exact, linear search for the most similar embeddings.",
"inputParams": [
{
"label": "Top K",
"name": "topK",
"description": "Number of top results to fetch. Default to 4",
"placeholder": "4",
"type": "number",
"optional": true,
"id": "memoryVectorStore_0-input-topK-number"
}
],
"inputAnchors": [
{
"label": "Document",
"name": "document",
"type": "Document",
"list": true,
"id": "memoryVectorStore_0-input-document-Document"
},
{
"label": "Embeddings",
"name": "embeddings",
"type": "Embeddings",
"id": "memoryVectorStore_0-input-embeddings-Embeddings"
}
],
"inputs": {
"document": ["{{github_0.data.instance}}"],
"embeddings": "{{openAIEmbeddings_0.data.instance}}",
"topK": ""
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "memoryVectorStore_0-output-retriever-Memory|VectorStoreRetriever|BaseRetriever",
"name": "retriever",
"label": "Memory Retriever",
"type": "Memory | VectorStoreRetriever | BaseRetriever"
},
{
"id": "memoryVectorStore_0-output-vectorStore-Memory|VectorStore",
"name": "vectorStore",
"label": "Memory Vector Store",
"type": "Memory | VectorStore"
}
],
"default": "retriever"
}
],
"outputs": {
"output": "retriever"
},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1844.88052464165,
"y": 484.60473328470243
},
"dragging": false
},
{
"width": 300,
"height": 479,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 2311.697827287373,
"y": 228.14841720207832
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["ConversationalRetrievalQAChain", "BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [
{
"label": "Return Source Documents",
"name": "returnSourceDocuments",
"type": "boolean",
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
},
{
"label": "System Message",
"name": "systemMessagePrompt",
"type": "string",
"rows": 4,
"additionalParams": true,
"optional": true,
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
},
{
"label": "Chain Option",
"name": "chainOption",
"type": "options",
"options": [
{
"label": "MapReduceDocumentsChain",
"name": "map_reduce",
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
},
{
"label": "RefineDocumentsChain",
"name": "refine",
"description": "Suitable for QA tasks over a large number of documents."
},
{
"label": "StuffDocumentsChain",
"name": "stuff",
"description": "Suitable for QA tasks over a small number of documents."
}
],
"additionalParams": true,
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseMemory",
"optional": true,
"description": "If left empty, a default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory"
}
],
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}",
"vectorStoreRetriever": "{{memoryVectorStore_0.data.instance}}",
"memory": "",
"returnSourceDocuments": true,
"systemMessagePrompt": "",
"chainOption": ""
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain | BaseChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 2311.697827287373,
"y": 228.14841720207832
}
},
{
"width": 300,
"height": 673,
"id": "github_0",
"position": {
"x": 1460.1858988997,
"y": -137.83585695472374
},
"type": "customNode",
"data": {
"id": "github_0",
"label": "Github",
"name": "github",
"type": "Document",
"baseClasses": ["Document"],
"category": "Document Loaders",
"description": "Load data from a GitHub repository",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"description": "Only needed when accessing private repo",
"optional": true,
"credentialNames": ["githubApi"],
"id": "github_0-input-credential-credential"
},
{
"label": "Repo Link",
"name": "repoLink",
"type": "string",
"placeholder": "https://github.com/FlowiseAI/Flowise",
"id": "github_0-input-repoLink-string"
},
{
"label": "Branch",
"name": "branch",
"type": "string",
"default": "main",
"id": "github_0-input-branch-string"
},
{
"label": "Recursive",
"name": "recursive",
"type": "boolean",
"optional": true,
"id": "github_0-input-recursive-boolean"
},
{
"label": "Metadata",
"name": "metadata",
"type": "json",
"optional": true,
"additionalParams": true,
"id": "github_0-input-metadata-json"
}
],
"inputAnchors": [
{
"label": "Text Splitter",
"name": "textSplitter",
"type": "TextSplitter",
"optional": true,
"id": "github_0-input-textSplitter-TextSplitter"
}
],
"inputs": {
"repoLink": "https://github.com/FlowiseAI/FlowiseDocs",
"branch": "main",
"recursive": true,
"textSplitter": "{{markdownTextSplitter_0.data.instance}}",
"metadata": ""
},
"outputAnchors": [
{
"id": "github_0-output-github-Document",
"name": "github",
"label": "Document",
"type": "Document"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1460.1858988997,
"y": -137.83585695472374
},
"dragging": false
},
{
"width": 300,
"height": 522,
"id": "chatOpenAI_0",
"position": {
"x": 1857.367353502965,
"y": -104.25095383414119
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": ["openAIApi"],
"id": "chatOpenAI_0-input-credential-credential"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0613",
"name": "gpt-4-0613"
},
{
"label": "gpt-4-32k",
"name": "gpt-4-32k"
},
{
"label": "gpt-4-32k-0613",
"name": "gpt-4-32k-0613"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0613",
"name": "gpt-3.5-turbo-0613"
},
{
"label": "gpt-3.5-turbo-16k",
"name": "gpt-3.5-turbo-16k"
},
{
"label": "gpt-3.5-turbo-16k-0613",
"name": "gpt-3.5-turbo-16k-0613"
}
],
"default": "gpt-3.5-turbo",
"optional": true,
"id": "chatOpenAI_0-input-modelName-options"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true,
"id": "chatOpenAI_0-input-temperature-number"
},
{
"label": "Max Tokens",
"name": "maxTokens",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-maxTokens-number"
},
{
"label": "Top Probability",
"name": "topP",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-topP-number"
},
{
"label": "Frequency Penalty",
"name": "frequencyPenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-frequencyPenalty-number"
},
{
"label": "Presence Penalty",
"name": "presencePenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-presencePenalty-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-basepath-string"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": 0.9,
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
"presencePenalty": "",
"timeout": "",
"basepath": ""
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1857.367353502965,
"y": -104.25095383414119
},
"dragging": false
},
{
"width": 300,
"height": 328,
"id": "openAIEmbeddings_0",
"position": {
"x": 1299.9983863833309,
"y": 581.8406384863323
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["OpenAIEmbeddings", "Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": ["openAIApi"],
"id": "openAIEmbeddings_0-input-credential-credential"
},
{
"label": "Strip New Lines",
"name": "stripNewLines",
"type": "boolean",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-stripNewLines-boolean"
},
{
"label": "Batch Size",
"name": "batchSize",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-batchSize-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-basepath-string"
}
],
"inputAnchors": [],
"inputs": {
"stripNewLines": "",
"batchSize": "",
"timeout": "",
"basepath": ""
},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "OpenAIEmbeddings | Embeddings"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"dragging": false,
"positionAbsolute": {
"x": 1299.9983863833309,
"y": 581.8406384863323
}
}
],
"edges": [
{
"source": "memoryVectorStore_0",
"sourceHandle": "memoryVectorStore_0-output-retriever-Memory|VectorStoreRetriever|BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "memoryVectorStore_0-memoryVectorStore_0-output-retriever-Memory|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}
},
{
"source": "markdownTextSplitter_0",
"sourceHandle": "markdownTextSplitter_0-output-markdownTextSplitter-MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer",
"target": "github_0",
"targetHandle": "github_0-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "markdownTextSplitter_0-markdownTextSplitter_0-output-markdownTextSplitter-MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer-github_0-github_0-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
},
{
"source": "github_0",
"sourceHandle": "github_0-output-github-Document",
"target": "memoryVectorStore_0",
"targetHandle": "memoryVectorStore_0-input-document-Document",
"type": "buttonedge",
"id": "github_0-github_0-output-github-Document-memoryVectorStore_0-memoryVectorStore_0-input-document-Document",
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"target": "memoryVectorStore_0",
"targetHandle": "memoryVectorStore_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-memoryVectorStore_0-memoryVectorStore_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
}
]
}

View File

@ -135,15 +135,16 @@
{
"label": "Memory",
"name": "memory",
"type": "DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory",
"type": "BaseMemory",
"optional": true,
"description": "If no memory connected, default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory"
"description": "If left empty, a default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory"
}
],
"inputs": {
"model": "{{chatLocalAI_0.data.instance}}",
"vectorStoreRetriever": "{{faissUpsert_0.data.instance}}"
"vectorStoreRetriever": "{{faissUpsert_0.data.instance}}",
"memory": ""
},
"outputAnchors": [
{

View File

@ -78,10 +78,10 @@
{
"label": "Memory",
"name": "memory",
"type": "DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory",
"type": "BaseMemory",
"optional": true,
"description": "If no memory connected, default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory"
"description": "If left empty, a default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory"
}
],
"inputs": {
@ -631,9 +631,9 @@
"source": "ZepMemory_0",
"sourceHandle": "ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-memory-DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory",
"targetHandle": "conversationalRetrievalQAChain_0-input-memory-BaseMemory",
"type": "buttonedge",
"id": "ZepMemory_0-ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-memory-DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory",
"id": "ZepMemory_0-ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-memory-BaseMemory",
"data": {
"label": ""
}

View File

@ -78,10 +78,10 @@
{
"label": "Memory",
"name": "memory",
"type": "DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory",
"type": "BaseMemory",
"optional": true,
"description": "If no memory connected, default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory"
"description": "If left empty, a default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory"
}
],
"inputs": {

View File

@ -283,10 +283,10 @@
{
"label": "Memory",
"name": "memory",
"type": "DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory",
"type": "BaseMemory",
"optional": true,
"description": "If no memory connected, default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory"
"description": "If left empty, a default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory"
}
],
"inputs": {

View File

@ -0,0 +1,273 @@
{
"description": "Use Replicate API that runs Llama 13b v2 model with LLMChain",
"nodes": [
{
"width": 300,
"height": 405,
"id": "llmChain_1",
"position": {
"x": 967.581544453458,
"y": 320.56761595884564
},
"type": "customNode",
"data": {
"id": "llmChain_1",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Name Your Chain",
"optional": true,
"id": "llmChain_1-input-chainName-string"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_1-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_1-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"model": "{{replicate_0.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"chainName": ""
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"options": [
{
"id": "llmChain_1-output-llmChain-LLMChain|BaseChain|BaseLangChain",
"name": "llmChain",
"label": "LLM Chain",
"type": "LLMChain | BaseChain | BaseLangChain"
},
{
"id": "llmChain_1-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"type": "string | json"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "llmChain"
},
"selected": false
},
"positionAbsolute": {
"x": 967.581544453458,
"y": 320.56761595884564
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 475,
"id": "promptTemplate_0",
"position": {
"x": 269.2203229225663,
"y": 129.02909641085535
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_0-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_0-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"template": "Assistant: You are a helpful assistant. You do not respond as 'User' or pretend to be 'User'. You only respond once as Assistant.\nUser: {query}\nAssistant:",
"promptValues": "{\"query\":\"{{question}}\"}"
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 269.2203229225663,
"y": 129.02909641085535
},
"dragging": false
},
{
"width": 300,
"height": 527,
"id": "replicate_0",
"position": {
"x": 607.4915400488668,
"y": -60.643337207007804
},
"type": "customNode",
"data": {
"id": "replicate_0",
"label": "Replicate",
"name": "replicate",
"type": "Replicate",
"baseClasses": ["Replicate", "LLM", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Use Replicate to run open source models on cloud",
"inputParams": [
{
"label": "Replicate Api Key",
"name": "replicateApiKey",
"type": "password",
"id": "replicate_0-input-replicateApiKey-password"
},
{
"label": "Model",
"name": "model",
"type": "string",
"placeholder": "a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
"optional": true,
"id": "replicate_0-input-model-string"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"description": "Adjusts randomness of outputs, greater than 1 is random and 0 is deterministic, 0.75 is a good starting value.",
"default": 0.7,
"optional": true,
"id": "replicate_0-input-temperature-number"
},
{
"label": "Max Tokens",
"name": "maxTokens",
"type": "number",
"description": "Maximum number of tokens to generate. A word is generally 2-3 tokens",
"optional": true,
"additionalParams": true,
"id": "replicate_0-input-maxTokens-number"
},
{
"label": "Top Probability",
"name": "topP",
"type": "number",
"description": "When decoding text, samples from the top p percentage of most likely tokens; lower to ignore less likely tokens",
"optional": true,
"additionalParams": true,
"id": "replicate_0-input-topP-number"
},
{
"label": "Repetition Penalty",
"name": "repetitionPenalty",
"type": "number",
"description": "Penalty for repeated words in generated text; 1 is no penalty, values greater than 1 discourage repetition, less than 1 encourage it. (minimum: 0.01; maximum: 5)",
"optional": true,
"additionalParams": true,
"id": "replicate_0-input-repetitionPenalty-number"
},
{
"label": "Additional Inputs",
"name": "additionalInputs",
"type": "json",
"description": "Each model has different parameters, refer to the specific model accepted inputs. For example: <a target=\"_blank\" href=\"https://replicate.com/a16z-infra/llama13b-v2-chat/api#inputs\">llama13b-v2</a>",
"additionalParams": true,
"optional": true,
"id": "replicate_0-input-additionalInputs-json"
}
],
"inputAnchors": [],
"inputs": {
"model": "a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
"temperature": 0.7,
"maxTokens": "",
"topP": "",
"repetitionPenalty": "",
"additionalInputs": ""
},
"outputAnchors": [
{
"id": "replicate_0-output-replicate-Replicate|LLM|BaseLLM|BaseLanguageModel",
"name": "replicate",
"label": "Replicate",
"type": "Replicate | LLM | BaseLLM | BaseLanguageModel"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 607.4915400488668,
"y": -60.643337207007804
},
"dragging": false
}
],
"edges": [
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "replicate_0",
"sourceHandle": "replicate_0-output-replicate-Replicate|LLM|BaseLLM|BaseLanguageModel",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "replicate_0-replicate_0-output-replicate-Replicate|LLM|BaseLLM|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel",
"data": {
"label": ""
}
}
]
}

View File

@ -1,122 +1,13 @@
{
"description": "Scrape web pages for QnA using conversational retrieval QA chain",
"description": "Scrape web pages for QnA with long term memory Motorhead and return source documents",
"nodes": [
{
"width": 300,
"height": 480,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 1574.2192193338556,
"y": 334.4358233039927
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["ConversationalRetrievalQAChain", "BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [
{
"label": "Return Source Documents",
"name": "returnSourceDocuments",
"type": "boolean",
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
},
{
"label": "System Message",
"name": "systemMessagePrompt",
"type": "string",
"rows": 4,
"additionalParams": true,
"optional": true,
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
},
{
"label": "Chain Option",
"name": "chainOption",
"type": "options",
"options": [
{
"label": "MapReduceDocumentsChain",
"name": "map_reduce",
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
},
{
"label": "RefineDocumentsChain",
"name": "refine",
"description": "Suitable for QA tasks over a large number of documents."
},
{
"label": "StuffDocumentsChain",
"name": "stuff",
"description": "Suitable for QA tasks over a small number of documents."
}
],
"additionalParams": true,
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
},
{
"label": "Memory",
"name": "memory",
"type": "DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory",
"optional": true,
"description": "If no memory connected, default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-DynamoDBChatMemory | RedisBackedChatMemory | ZepMemory"
}
],
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}",
"memory": "",
"returnSourceDocuments": "",
"systemMessagePrompt": "",
"chainOption": ""
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain | BaseChain"
}
],
"outputs": {},
"selected": false
},
"positionAbsolute": {
"x": 1574.2192193338556,
"y": 334.4358233039927
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 523,
"height": 522,
"id": "chatOpenAI_0",
"position": {
"x": 1184.1176114500388,
"y": -44.15535835370571
"x": 1509.7110310286191,
"y": -171.0099374102956
},
"type": "customNode",
"data": {
@ -257,19 +148,19 @@
"selected": false
},
"positionAbsolute": {
"x": 1184.1176114500388,
"y": -44.15535835370571
"x": 1509.7110310286191,
"y": -171.0099374102956
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 329,
"height": 328,
"id": "openAIEmbeddings_0",
"position": {
"x": 795.6162477805387,
"y": 603.260214150876
"x": 827.6835380475393,
"y": 253.8955254525015
},
"type": "customNode",
"data": {
@ -341,18 +232,18 @@
},
"selected": false,
"positionAbsolute": {
"x": 795.6162477805387,
"y": 603.260214150876
"x": 827.6835380475393,
"y": 253.8955254525015
},
"dragging": false
},
{
"width": 300,
"height": 555,
"height": 554,
"id": "pineconeUpsert_0",
"position": {
"x": 1191.1792786926865,
"y": 514.2126330994578
"x": 1178.0855412625938,
"y": -1.6626550640073674
},
"type": "customNode",
"data": {
@ -448,18 +339,18 @@
},
"selected": false,
"positionAbsolute": {
"x": 1191.1792786926865,
"y": 514.2126330994578
"x": 1178.0855412625938,
"y": -1.6626550640073674
},
"dragging": false
},
{
"width": 300,
"height": 380,
"height": 379,
"id": "cheerioWebScraper_0",
"position": {
"x": 788.9349009610441,
"y": 180.57790622561026
"x": 829.4409518246235,
"y": -168.78678247276423
},
"type": "customNode",
"data": {
@ -528,9 +419,9 @@
],
"inputs": {
"url": "https://www.itsjane.com",
"textSplitter": "{{recursiveCharacterTextSplitter_0.data.instance}}",
"relativeLinksMethod": "webCrawl",
"limit": "0",
"textSplitter": "{{htmlToMarkdownTextSplitter_0.data.instance}}",
"relativeLinksMethod": "",
"limit": "",
"metadata": ""
},
"outputAnchors": [
@ -546,28 +437,34 @@
},
"selected": false,
"positionAbsolute": {
"x": 788.9349009610441,
"y": 180.57790622561026
"x": 829.4409518246235,
"y": -168.78678247276423
},
"dragging": false
},
{
"width": 300,
"height": 376,
"id": "recursiveCharacterTextSplitter_0",
"id": "htmlToMarkdownTextSplitter_0",
"position": {
"x": 406.08456707531263,
"y": 197.66460328693972
"x": 443.00626484042334,
"y": 1.2942107707648631
},
"type": "customNode",
"data": {
"id": "recursiveCharacterTextSplitter_0",
"label": "Recursive Character Text Splitter",
"name": "recursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter",
"baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"],
"id": "htmlToMarkdownTextSplitter_0",
"label": "HtmlToMarkdown Text Splitter",
"name": "htmlToMarkdownTextSplitter",
"type": "HtmlToMarkdownTextSplitter",
"baseClasses": [
"HtmlToMarkdownTextSplitter",
"MarkdownTextSplitter",
"RecursiveCharacterTextSplitter",
"TextSplitter",
"BaseDocumentTransformer"
],
"category": "Text Splitters",
"description": "Split documents recursively by different characters - starting with \"\\n\\n\", then \"\\n\", then \" \"",
"description": "Converts Html to Markdown and then split your content into documents based on the Markdown headers",
"inputParams": [
{
"label": "Chunk Size",
@ -575,14 +472,14 @@
"type": "number",
"default": 1000,
"optional": true,
"id": "recursiveCharacterTextSplitter_0-input-chunkSize-number"
"id": "htmlToMarkdownTextSplitter_0-input-chunkSize-number"
},
{
"label": "Chunk Overlap",
"name": "chunkOverlap",
"type": "number",
"optional": true,
"id": "recursiveCharacterTextSplitter_0-input-chunkOverlap-number"
"id": "htmlToMarkdownTextSplitter_0-input-chunkOverlap-number"
}
],
"inputAnchors": [],
@ -592,10 +489,10 @@
},
"outputAnchors": [
{
"id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"name": "recursiveCharacterTextSplitter",
"label": "RecursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter | TextSplitter"
"id": "htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer",
"name": "htmlToMarkdownTextSplitter",
"label": "HtmlToMarkdownTextSplitter",
"type": "HtmlToMarkdownTextSplitter | MarkdownTextSplitter | RecursiveCharacterTextSplitter | TextSplitter | BaseDocumentTransformer"
}
],
"outputs": {},
@ -603,8 +500,195 @@
},
"selected": false,
"positionAbsolute": {
"x": 406.08456707531263,
"y": 197.66460328693972
"x": 443.00626484042334,
"y": 1.2942107707648631
},
"dragging": false
},
{
"width": 300,
"height": 479,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 1882.5543981868987,
"y": 305.08959224761225
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["ConversationalRetrievalQAChain", "BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [
{
"label": "Return Source Documents",
"name": "returnSourceDocuments",
"type": "boolean",
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean"
},
{
"label": "System Message",
"name": "systemMessagePrompt",
"type": "string",
"rows": 4,
"additionalParams": true,
"optional": true,
"placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.",
"id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string"
},
{
"label": "Chain Option",
"name": "chainOption",
"type": "options",
"options": [
{
"label": "MapReduceDocumentsChain",
"name": "map_reduce",
"description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time"
},
{
"label": "RefineDocumentsChain",
"name": "refine",
"description": "Suitable for QA tasks over a large number of documents."
},
{
"label": "StuffDocumentsChain",
"name": "stuff",
"description": "Suitable for QA tasks over a small number of documents."
}
],
"additionalParams": true,
"optional": true,
"id": "conversationalRetrievalQAChain_0-input-chainOption-options"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseMemory",
"optional": true,
"description": "If left empty, a default BufferMemory will be used",
"id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory"
}
],
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}",
"memory": "{{motorheadMemory_0.data.instance}}",
"returnSourceDocuments": true,
"systemMessagePrompt": "",
"chainOption": ""
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain | BaseChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1882.5543981868987,
"y": 305.08959224761225
},
"dragging": false
},
{
"width": 300,
"height": 426,
"id": "motorheadMemory_0",
"position": {
"x": 1515.4202055109095,
"y": 539.7912360964175
},
"type": "customNode",
"data": {
"id": "motorheadMemory_0",
"label": "Motorhead Memory",
"name": "motorheadMemory",
"type": "MotorheadMemory",
"baseClasses": ["MotorheadMemory", "BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Use Motorhead Memory to store chat conversations",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"optional": true,
"description": "Only needed when using hosted solution - https://getmetal.io",
"credentialNames": ["motorheadMemoryApi"],
"id": "motorheadMemory_0-input-credential-credential"
},
{
"label": "Base URL",
"name": "baseURL",
"type": "string",
"optional": true,
"description": "To use the online version, leave the URL blank. More details at https://getmetal.io.",
"id": "motorheadMemory_0-input-baseURL-string"
},
{
"label": "Session Id",
"name": "sessionId",
"type": "string",
"description": "if empty, chatId will be used automatically",
"default": "",
"additionalParams": true,
"optional": true,
"id": "motorheadMemory_0-input-sessionId-string"
},
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history",
"additionalParams": true,
"id": "motorheadMemory_0-input-memoryKey-string"
}
],
"inputAnchors": [],
"inputs": {
"baseURL": "",
"sessionId": "",
"memoryKey": "chat_history"
},
"outputAnchors": [
{
"id": "motorheadMemory_0-output-motorheadMemory-MotorheadMemory|BaseChatMemory|BaseMemory",
"name": "motorheadMemory",
"label": "MotorheadMemory",
"type": "MotorheadMemory | BaseChatMemory | BaseMemory"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1515.4202055109095,
"y": 539.7912360964175
},
"dragging": false
}
@ -622,12 +706,23 @@
}
},
{
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"source": "cheerioWebScraper_0",
"sourceHandle": "cheerioWebScraper_0-output-cheerioWebScraper-Document",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-document-Document",
"type": "buttonedge",
"id": "pineconeUpsert_0-pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"id": "cheerioWebScraper_0-cheerioWebScraper_0-output-cheerioWebScraper-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document",
"data": {
"label": ""
}
},
{
"source": "htmlToMarkdownTextSplitter_0",
"sourceHandle": "htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer",
"target": "cheerioWebScraper_0",
"targetHandle": "cheerioWebScraper_0-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "htmlToMarkdownTextSplitter_0-htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer-cheerioWebScraper_0-cheerioWebScraper_0-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
@ -644,23 +739,23 @@
}
},
{
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter",
"target": "cheerioWebScraper_0",
"targetHandle": "cheerioWebScraper_0-input-textSplitter-TextSplitter",
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-cheerioWebScraper_0-cheerioWebScraper_0-input-textSplitter-TextSplitter",
"id": "pineconeUpsert_0-pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}
},
{
"source": "cheerioWebScraper_0",
"sourceHandle": "cheerioWebScraper_0-output-cheerioWebScraper-Document",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-document-Document",
"source": "motorheadMemory_0",
"sourceHandle": "motorheadMemory_0-output-motorheadMemory-MotorheadMemory|BaseChatMemory|BaseMemory",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-memory-BaseMemory",
"type": "buttonedge",
"id": "cheerioWebScraper_0-cheerioWebScraper_0-output-cheerioWebScraper-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document",
"id": "motorheadMemory_0-motorheadMemory_0-output-motorheadMemory-MotorheadMemory|BaseChatMemory|BaseMemory-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-memory-BaseMemory",
"data": {
"label": ""
}

View File

@ -1,6 +1,6 @@
{
"ignore": ["**/*.spec.ts", ".git", "node_modules"],
"watch": ["commands", "index.ts", "src"],
"watch": ["commands", "index.ts", "src", "../components/nodes", "../components/src"],
"exec": "yarn oclif-dev",
"ext": "ts"
}

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.2.15",
"version": "1.2.16",
"description": "Flowiseai Server",
"main": "dist/index",
"types": "dist/index.d.ts",
@ -56,6 +56,8 @@
"flowise-ui": "*",
"moment-timezone": "^0.5.34",
"multer": "^1.4.5-lts.1",
"mysql": "^2.18.1",
"pg": "^8.11.1",
"reflect-metadata": "^0.1.13",
"socket.io": "^4.6.1",
"sqlite3": "^5.1.6",

View File

@ -1,6 +1,14 @@
import path from 'path'
import { IChildProcessMessage, IReactFlowNode, IReactFlowObject, IRunChatflowMessageValue, INodeData } from './Interface'
import { buildLangchain, constructGraphs, getEndingNode, getStartingNodes, getUserHome, resolveVariables } from './utils'
import {
buildLangchain,
constructGraphs,
getEndingNode,
getStartingNodes,
getUserHome,
replaceInputsWithConfig,
resolveVariables
} from './utils'
import { DataSource } from 'typeorm'
import { ChatFlow } from './entity/ChatFlow'
import { ChatMessage } from './entity/ChatMessage'
@ -110,6 +118,8 @@ export class ChildProcess {
return
}
if (incomingInput.overrideConfig)
nodeToExecute.data = replaceInputsWithConfig(nodeToExecute.data, incomingInput.overrideConfig)
const reactFlowNodeData: INodeData = resolveVariables(nodeToExecute.data, reactFlowNodes, incomingInput.question)
nodeToExecuteData = reactFlowNodeData
@ -143,14 +153,59 @@ export class ChildProcess {
* @returns {DataSource}
*/
async function initDB() {
const homePath = process.env.DATABASE_PATH ?? path.join(getUserHome(), '.flowise')
const childAppDataSource = new DataSource({
type: 'sqlite',
database: path.resolve(homePath, 'database.sqlite'),
synchronize: true,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
let childAppDataSource
let homePath
const synchronize = process.env.OVERRIDE_DATABASE === 'false' ? false : true
switch (process.env.DATABASE_TYPE) {
case 'sqlite':
homePath = process.env.DATABASE_PATH ?? path.join(getUserHome(), '.flowise')
childAppDataSource = new DataSource({
type: 'sqlite',
database: path.resolve(homePath, 'database.sqlite'),
synchronize,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
break
case 'mysql':
childAppDataSource = new DataSource({
type: 'mysql',
host: process.env.DATABASE_HOST,
port: parseInt(process.env.DATABASE_PORT || '3306'),
username: process.env.DATABASE_USER,
password: process.env.DATABASE_PASSWORD,
database: process.env.DATABASE_NAME,
charset: 'utf8mb4',
synchronize,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
break
case 'postgres':
childAppDataSource = new DataSource({
type: 'postgres',
host: process.env.DATABASE_HOST,
port: parseInt(process.env.DATABASE_PORT || '5432'),
username: process.env.DATABASE_USER,
password: process.env.DATABASE_PASSWORD,
database: process.env.DATABASE_NAME,
synchronize,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
break
default:
homePath = process.env.DATABASE_PATH ?? path.join(getUserHome(), '.flowise')
childAppDataSource = new DataSource({
type: 'sqlite',
database: path.resolve(homePath, 'database.sqlite'),
synchronize,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
break
}
return await childAppDataSource.initialize()
}

View File

@ -10,15 +10,57 @@ import { getUserHome } from './utils'
let appDataSource: DataSource
export const init = async (): Promise<void> => {
const homePath = process.env.DATABASE_PATH ?? path.join(getUserHome(), '.flowise')
appDataSource = new DataSource({
type: 'sqlite',
database: path.resolve(homePath, 'database.sqlite'),
synchronize: true,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
let homePath
const synchronize = process.env.OVERRIDE_DATABASE === 'false' ? false : true
switch (process.env.DATABASE_TYPE) {
case 'sqlite':
homePath = process.env.DATABASE_PATH ?? path.join(getUserHome(), '.flowise')
appDataSource = new DataSource({
type: 'sqlite',
database: path.resolve(homePath, 'database.sqlite'),
synchronize,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
break
case 'mysql':
appDataSource = new DataSource({
type: 'mysql',
host: process.env.DATABASE_HOST,
port: parseInt(process.env.DATABASE_PORT || '3306'),
username: process.env.DATABASE_USER,
password: process.env.DATABASE_PASSWORD,
database: process.env.DATABASE_NAME,
charset: 'utf8mb4',
synchronize,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
break
case 'postgres':
appDataSource = new DataSource({
type: 'postgres',
host: process.env.DATABASE_HOST,
port: parseInt(process.env.DATABASE_PORT || '5432'),
username: process.env.DATABASE_USER,
password: process.env.DATABASE_PASSWORD,
database: process.env.DATABASE_NAME,
synchronize,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
break
default:
homePath = process.env.DATABASE_PATH ?? path.join(getUserHome(), '.flowise')
appDataSource = new DataSource({
type: 'sqlite',
database: path.resolve(homePath, 'database.sqlite'),
synchronize,
entities: [ChatFlow, ChatMessage, Tool, Credential],
migrations: []
})
break
}
}
export function getDataSource(): DataSource {

View File

@ -21,12 +21,21 @@ export default class Start extends Command {
PORT: Flags.string(),
PASSPHRASE: Flags.string(),
DEBUG: Flags.string(),
DATABASE_PATH: Flags.string(),
APIKEY_PATH: Flags.string(),
SECRETKEY_PATH: Flags.string(),
LOG_PATH: Flags.string(),
LOG_LEVEL: Flags.string(),
EXECUTION_MODE: Flags.string()
EXECUTION_MODE: Flags.string(),
TOOL_FUNCTION_BUILTIN_DEP: Flags.string(),
TOOL_FUNCTION_EXTERNAL_DEP: Flags.string(),
OVERRIDE_DATABASE: Flags.string(),
DATABASE_TYPE: Flags.string(),
DATABASE_PATH: Flags.string(),
DATABASE_PORT: Flags.string(),
DATABASE_HOST: Flags.string(),
DATABASE_NAME: Flags.string(),
DATABASE_USER: Flags.string(),
DATABASE_PASSWORD: Flags.string()
}
async stopProcess() {
@ -58,18 +67,38 @@ export default class Start extends Command {
})
const { flags } = await this.parse(Start)
if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME
if (flags.FLOWISE_PASSWORD) process.env.FLOWISE_PASSWORD = flags.FLOWISE_PASSWORD
if (flags.PORT) process.env.PORT = flags.PORT
if (flags.PASSPHRASE) process.env.PASSPHRASE = flags.PASSPHRASE
if (flags.DATABASE_PATH) process.env.DATABASE_PATH = flags.DATABASE_PATH
if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH
if (flags.SECRETKEY_PATH) process.env.SECRETKEY_PATH = flags.SECRETKEY_PATH
if (flags.LOG_PATH) process.env.LOG_PATH = flags.LOG_PATH
if (flags.LOG_LEVEL) process.env.LOG_LEVEL = flags.LOG_LEVEL
if (flags.EXECUTION_MODE) process.env.EXECUTION_MODE = flags.EXECUTION_MODE
if (flags.DEBUG) process.env.DEBUG = flags.DEBUG
// Authorization
if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME
if (flags.FLOWISE_PASSWORD) process.env.FLOWISE_PASSWORD = flags.FLOWISE_PASSWORD
if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH
// Credentials
if (flags.PASSPHRASE) process.env.PASSPHRASE = flags.PASSPHRASE
if (flags.SECRETKEY_PATH) process.env.SECRETKEY_PATH = flags.SECRETKEY_PATH
// Logs
if (flags.LOG_PATH) process.env.LOG_PATH = flags.LOG_PATH
if (flags.LOG_LEVEL) process.env.LOG_LEVEL = flags.LOG_LEVEL
// Tool functions
if (flags.TOOL_FUNCTION_BUILTIN_DEP) process.env.TOOL_FUNCTION_BUILTIN_DEP = flags.TOOL_FUNCTION_BUILTIN_DEP
if (flags.TOOL_FUNCTION_EXTERNAL_DEP) process.env.TOOL_FUNCTION_EXTERNAL_DEP = flags.TOOL_FUNCTION_EXTERNAL_DEP
// Database config
if (flags.OVERRIDE_DATABASE) process.env.OVERRIDE_DATABASE = flags.OVERRIDE_DATABASE
if (flags.DATABASE_TYPE) process.env.DATABASE_TYPE = flags.DATABASE_TYPE
if (flags.DATABASE_PATH) process.env.DATABASE_PATH = flags.DATABASE_PATH
if (flags.DATABASE_PORT) process.env.DATABASE_PORT = flags.DATABASE_PORT
if (flags.DATABASE_HOST) process.env.DATABASE_HOST = flags.DATABASE_HOST
if (flags.DATABASE_NAME) process.env.DATABASE_NAME = flags.DATABASE_NAME
if (flags.DATABASE_USER) process.env.DATABASE_USER = flags.DATABASE_USER
if (flags.DATABASE_PASSWORD) process.env.DATABASE_PASSWORD = flags.DATABASE_PASSWORD
await (async () => {
try {
logger.info('Starting Flowise...')

View File

@ -10,7 +10,7 @@ export class ChatFlow implements IChatFlow {
@Column()
name: string
@Column()
@Column({ type: 'text' })
flowData: string
@Column({ nullable: true })

View File

@ -14,7 +14,7 @@ export class ChatMessage implements IChatMessage {
@Column()
chatflowid: string
@Column()
@Column({ type: 'text' })
content: string
@Column({ nullable: true })

View File

@ -10,7 +10,7 @@ export class Tool implements ITool {
@Column()
name: string
@Column()
@Column({ type: 'text' })
description: string
@Column()

View File

@ -42,7 +42,9 @@ import {
databaseEntities,
getApiKey,
transformToCredentialEntity,
decryptCredentialData
decryptCredentialData,
clearSessionMemory,
replaceInputsWithConfig
} from './utils'
import { cloneDeep, omit } from 'lodash'
import { getDataSource } from './DataSource'
@ -63,9 +65,6 @@ export class App {
constructor() {
this.app = express()
// Add the expressRequestLogger middleware to log all requests
this.app.use(expressRequestLogger)
}
async initDatabase() {
@ -97,6 +96,9 @@ export class App {
// Allow access from *
this.app.use(cors())
// Add the expressRequestLogger middleware to log all requests
this.app.use(expressRequestLogger)
if (process.env.FLOWISE_USERNAME && process.env.FLOWISE_PASSWORD) {
const username = process.env.FLOWISE_USERNAME
const password = process.env.FLOWISE_PASSWORD
@ -362,8 +364,13 @@ export class App {
// Get all chatmessages from chatflowid
this.app.get('/api/v1/chatmessage/:id', async (req: Request, res: Response) => {
const chatmessages = await this.AppDataSource.getRepository(ChatMessage).findBy({
chatflowid: req.params.id
const chatmessages = await this.AppDataSource.getRepository(ChatMessage).find({
where: {
chatflowid: req.params.id
},
order: {
createdDate: 'ASC'
}
})
return res.json(chatmessages)
})
@ -382,6 +389,19 @@ export class App {
// Delete all chatmessages from chatflowid
this.app.delete('/api/v1/chatmessage/:id', async (req: Request, res: Response) => {
const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({
id: req.params.id
})
if (!chatflow) {
res.status(404).send(`Chatflow ${req.params.id} not found`)
return
}
const flowData = chatflow.flowData
const parsedFlowData: IReactFlowObject = JSON.parse(flowData)
const nodes = parsedFlowData.nodes
let chatId = await getChatId(chatflow.id)
if (!chatId) chatId = chatflow.id
clearSessionMemory(nodes, this.nodesPool.componentNodes, chatId, req.query.sessionId as string)
const results = await this.AppDataSource.getRepository(ChatMessage).delete({ chatflowid: req.params.id })
return res.json(results)
})
@ -630,6 +650,8 @@ export class App {
}
templates.push(template)
})
const FlowiseDocsQnA = templates.find((tmp) => tmp.name === 'Flowise Docs QnA')
if (FlowiseDocsQnA) templates.unshift(FlowiseDocsQnA)
return res.json(templates)
})
@ -809,7 +831,7 @@ export class App {
if (!chatflow) return res.status(404).send(`Chatflow ${chatflowid} not found`)
let chatId = await getChatId(chatflow.id)
if (!chatId) chatId = Date.now().toString()
if (!chatId) chatId = chatflowid
if (!isInternal) {
await this.validateKey(req, res, chatflow)
@ -941,6 +963,8 @@ export class App {
const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId)
if (!nodeToExecute) return res.status(404).send(`Node ${endingNodeId} not found`)
if (incomingInput.overrideConfig)
nodeToExecute.data = replaceInputsWithConfig(nodeToExecute.data, incomingInput.overrideConfig)
const reactFlowNodeData: INodeData = resolveVariables(nodeToExecute.data, reactFlowNodes, incomingInput.question)
nodeToExecuteData = reactFlowNodeData

View File

@ -7,7 +7,7 @@ dotenv.config({ path: path.join(__dirname, '..', '..', '.env'), override: true }
// default config
const loggingConfig = {
dir: process.env.LOG_PATH ?? path.join(__dirname, '..', '..', '..', '..', 'logs'),
dir: process.env.LOG_PATH ?? path.join(__dirname, '..', '..', 'logs'),
server: {
level: process.env.LOG_LEVEL ?? 'info',
filename: 'server.log',

View File

@ -19,7 +19,7 @@ import {
ICredentialReqBody
} from '../Interface'
import { cloneDeep, get, omit, merge } from 'lodash'
import { ICommonObject, getInputVariables, IDatabaseEntity } from 'flowise-components'
import { ICommonObject, getInputVariables, IDatabaseEntity, handleEscapeCharacters } from 'flowise-components'
import { scryptSync, randomBytes, timingSafeEqual } from 'crypto'
import { lib, PBKDF2, AES, enc } from 'crypto-js'
@ -281,6 +281,29 @@ export const buildLangchain = async (
return flowNodes
}
/**
* Clear memory
* @param {IReactFlowNode[]} reactFlowNodes
* @param {IComponentNodes} componentNodes
* @param {string} chatId
* @param {string} sessionId
*/
export const clearSessionMemory = async (
reactFlowNodes: IReactFlowNode[],
componentNodes: IComponentNodes,
chatId: string,
sessionId?: string
) => {
for (const node of reactFlowNodes) {
if (node.data.category !== 'Memory') continue
const nodeInstanceFilePath = componentNodes[node.data.name].filePath as string
const nodeModule = await import(nodeInstanceFilePath)
const newNodeInstance = new nodeModule.nodeClass()
if (sessionId && node.data.inputs) node.data.inputs.sessionId = sessionId
if (newNodeInstance.clearSessionMemory) await newNodeInstance?.clearSessionMemory(node.data, { chatId })
}
}
/**
* Get variable value from outputResponses.output
* @param {string} paramValue
@ -310,8 +333,13 @@ export const getVariableValue = (paramValue: string, reactFlowNodes: IReactFlowN
const variableEndIdx = startIdx
const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx)
/**
* Apply string transformation to convert special chars:
* FROM: hello i am ben\n\n\thow are you?
* TO: hello i am benFLOWISE_NEWLINEFLOWISE_NEWLINEFLOWISE_TABhow are you?
*/
if (isAcceptVariable && variableFullPath === QUESTION_VAR_PREFIX) {
variableDict[`{{${variableFullPath}}}`] = question
variableDict[`{{${variableFullPath}}}`] = handleEscapeCharacters(question, false)
}
// Split by first occurrence of '.' to get just nodeId
@ -414,7 +442,11 @@ export const replaceInputsWithConfig = (flowNodeData: INodeData, overrideConfig:
const getParamValues = (paramsObj: ICommonObject) => {
for (const config in overrideConfig) {
paramsObj[config] = overrideConfig[config]
let paramValue = overrideConfig[config] ?? paramsObj[config]
// Check if boolean
if (paramValue === 'true') paramValue = true
else if (paramValue === 'false') paramValue = false
paramsObj[config] = paramValue
}
}
@ -730,7 +762,7 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod
isValidChainOrAgent = !blacklistChains.includes(endingNodeData.name)
} else if (endingNodeData.category === 'Agents') {
// Agent that are available to stream
const whitelistAgents = ['openAIFunctionAgent']
const whitelistAgents = ['openAIFunctionAgent', 'csvAgent', 'airtableAgent']
isValidChainOrAgent = whitelistAgents.includes(endingNodeData.name)
}

View File

@ -57,44 +57,47 @@ const logger = createLogger({
* this.app.use(expressRequestLogger)
*/
export function expressRequestLogger(req: Request, res: Response, next: NextFunction): void {
const fileLogger = createLogger({
format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json(), errors({ stack: true })),
defaultMeta: {
package: 'server',
request: {
method: req.method,
url: req.url,
body: req.body,
query: req.query,
params: req.params,
headers: req.headers
}
},
transports: [
new transports.File({
filename: path.join(logDir, config.logging.express.filename ?? 'server-requests.log.jsonl'),
level: config.logging.express.level ?? 'debug'
})
]
})
const unwantedLogURLs = ['/api/v1/node-icon/']
if (req.url.includes('/api/v1/') && !unwantedLogURLs.some((url) => req.url.includes(url))) {
const fileLogger = createLogger({
format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json(), errors({ stack: true })),
defaultMeta: {
package: 'server',
request: {
method: req.method,
url: req.url,
body: req.body,
query: req.query,
params: req.params,
headers: req.headers
}
},
transports: [
new transports.File({
filename: path.join(logDir, config.logging.express.filename ?? 'server-requests.log.jsonl'),
level: config.logging.express.level ?? 'debug'
})
]
})
const getRequestEmoji = (method: string) => {
const requetsEmojis: Record<string, string> = {
GET: '⬇️',
POST: '⬆️',
PUT: '🖊',
DELETE: '❌',
OPTION: '🔗'
const getRequestEmoji = (method: string) => {
const requetsEmojis: Record<string, string> = {
GET: '⬇️',
POST: '⬆️',
PUT: '🖊',
DELETE: '❌',
OPTION: '🔗'
}
return requetsEmojis[method] || '?'
}
return requetsEmojis[method] || '?'
}
if (req.method !== 'GET') {
fileLogger.info(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`)
logger.info(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`)
} else {
fileLogger.http(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`)
if (req.method !== 'GET') {
fileLogger.info(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`)
logger.info(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`)
} else {
fileLogger.http(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`)
}
}
next()

View File

@ -1,6 +1,6 @@
{
"name": "flowise-ui",
"version": "1.2.14",
"version": "1.2.15",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://flowiseai.com",
"author": {

View File

@ -11,6 +11,8 @@ export const SET_DARKMODE = '@customization/SET_DARKMODE'
export const SET_DIRTY = '@canvas/SET_DIRTY'
export const REMOVE_DIRTY = '@canvas/REMOVE_DIRTY'
export const SET_CHATFLOW = '@canvas/SET_CHATFLOW'
export const SHOW_CANVAS_DIALOG = '@canvas/SHOW_CANVAS_DIALOG'
export const HIDE_CANVAS_DIALOG = '@canvas/HIDE_CANVAS_DIALOG'
// action - notifier reducer
export const ENQUEUE_SNACKBAR = 'ENQUEUE_SNACKBAR'

View File

@ -3,7 +3,8 @@ import * as actionTypes from '../actions'
export const initialState = {
isDirty: false,
chatflow: null
chatflow: null,
canvasDialogShow: false
}
// ==============================|| CANVAS REDUCER ||============================== //
@ -25,6 +26,16 @@ const canvasReducer = (state = initialState, action) => {
...state,
chatflow: action.chatflow
}
case actionTypes.SHOW_CANVAS_DIALOG:
return {
...state,
canvasDialogShow: true
}
case actionTypes.HIDE_CANVAS_DIALOG:
return {
...state,
canvasDialogShow: false
}
default:
return state
}

View File

@ -1,12 +1,15 @@
import { createPortal } from 'react-dom'
import { useDispatch } from 'react-redux'
import { useState, useEffect } from 'react'
import PropTypes from 'prop-types'
import { Dialog, DialogContent } from '@mui/material'
import PerfectScrollbar from 'react-perfect-scrollbar'
import NodeInputHandler from 'views/canvas/NodeInputHandler'
import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from 'store/actions'
const AdditionalParamsDialog = ({ show, dialogProps, onCancel }) => {
const portalElement = document.getElementById('portal')
const dispatch = useDispatch()
const [inputParams, setInputParams] = useState([])
const [data, setData] = useState({})
@ -21,6 +24,11 @@ const AdditionalParamsDialog = ({ show, dialogProps, onCancel }) => {
}
}, [dialogProps])
useEffect(() => {
if (show) dispatch({ type: SHOW_CANVAS_DIALOG })
else dispatch({ type: HIDE_CANVAS_DIALOG })
}, [show, dispatch])
const component = show ? (
<Dialog
onClose={onCancel}

View File

@ -1,6 +1,6 @@
import { createPortal } from 'react-dom'
import { useState, useEffect } from 'react'
import { useSelector } from 'react-redux'
import { useSelector, useDispatch } from 'react-redux'
import PropTypes from 'prop-types'
import { Button, Dialog, DialogActions, DialogContent, Typography } from '@mui/material'
import { useTheme } from '@mui/material/styles'
@ -8,6 +8,7 @@ import PerfectScrollbar from 'react-perfect-scrollbar'
import { StyledButton } from 'ui-component/button/StyledButton'
import { DarkCodeEditor } from 'ui-component/editor/DarkCodeEditor'
import { LightCodeEditor } from 'ui-component/editor/LightCodeEditor'
import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from 'store/actions'
import './ExpandTextDialog.css'
@ -15,6 +16,7 @@ const ExpandTextDialog = ({ show, dialogProps, onCancel, onConfirm }) => {
const portalElement = document.getElementById('portal')
const theme = useTheme()
const dispatch = useDispatch()
const customization = useSelector((state) => state.customization)
const languageType = 'json'
@ -31,6 +33,11 @@ const ExpandTextDialog = ({ show, dialogProps, onCancel, onConfirm }) => {
}
}, [dialogProps])
useEffect(() => {
if (show) dispatch({ type: SHOW_CANVAS_DIALOG })
else dispatch({ type: HIDE_CANVAS_DIALOG })
}, [show, dispatch])
const component = show ? (
<Dialog open={show} fullWidth maxWidth='md' aria-labelledby='alert-dialog-title' aria-describedby='alert-dialog-description'>
<DialogContent>

Some files were not shown because too many files have changed in this diff Show More