Chore/refactor (#4454)

* markdown files and env examples cleanup

* components update

* update jsonlines description

* server refactor

* update telemetry

* add execute custom node

* add ui refactor

* add username and password authenticate

* correctly retrieve past images in agentflowv2

* disable e2e temporarily

* add existing username and password authenticate

* update migration to default workspace

* update todo

* blob storage migrating

* throw error on agent tool call error

* add missing execution import

* add referral

* chore: add error message when importData is undefined

* migrate api keys to db

* fix: data too long for column executionData

* migrate api keys from json to db at init

* add info on account setup

* update docstore missing fields

---------

Co-authored-by: chungyau97 <chungyau97@gmail.com>
This commit is contained in:
Henry Heng 2025-05-27 14:29:42 +08:00 committed by GitHub
parent e35a126b46
commit 5a37227d14
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
560 changed files with 62127 additions and 4100 deletions

View File

@ -1,33 +0,0 @@
name: autoSyncMergedPullRequest
on:
pull_request_target:
types:
- closed
branches: ['main']
jobs:
autoSyncMergedPullRequest:
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- name: Show PR info
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo The PR #${{ github.event.pull_request.number }} was merged on main branch!
- name: Repository Dispatch
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.AUTOSYNC_TOKEN }}
repository: ${{ secrets.AUTOSYNC_CH_URL }}
event-type: ${{ secrets.AUTOSYNC_PR_EVENT_TYPE }}
client-payload: >-
{
"ref": "${{ github.ref }}",
"prNumber": "${{ github.event.pull_request.number }}",
"prTitle": "${{ github.event.pull_request.title }}",
"prDescription": "",
"sha": "${{ github.sha }}"
}

View File

@ -1,36 +0,0 @@
name: autoSyncSingleCommit
on:
push:
branches:
- main
jobs:
doNotAutoSyncSingleCommit:
if: github.event.commits[1] != null
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: IGNORE autoSyncSingleCommit
run: |
echo This single commit has came from a merged commit. We will ignore it. This case is handled in autoSyncMergedPullRequest workflow for merge commits comming from merged pull requests only! Beware, the regular merge commits are not handled by any workflow for the moment.
autoSyncSingleCommit:
if: github.event.commits[1] == null
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: autoSyncSingleCommit
env:
GITHUB_CONTEXT: ${{ toJSON(github) }}
run: |
echo Autosync a single commit with id: ${{ github.sha }} from openSource main branch towards cloud hosted version.
- name: Repository Dispatch
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.AUTOSYNC_TOKEN }}
repository: ${{ secrets.AUTOSYNC_CH_URL }}
event-type: ${{ secrets.AUTOSYNC_SC_EVENT_TYPE }}
client-payload: >-
{
"ref": "${{ github.ref }}",
"sha": "${{ github.sha }}",
"commitMessage": "${{ github.event.commits[0].id }}"
}

View File

@ -3,6 +3,22 @@ name: Docker Image CI
on:
workflow_dispatch:
inputs:
registry:
description: 'Container Registry to push the image to.'
type: choice
required: true
default: 'aws_ecr'
options:
- 'docker_hub'
- 'aws_ecr'
environment:
description: 'Environment to push the image to.'
required: true
default: 'dev'
type: choice
options:
- dev
- prod
node_version:
description: 'Node.js version to build this image with.'
type: choice
@ -19,25 +35,57 @@ on:
jobs:
docker:
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.environment }}
steps:
- name: Set default values
id: defaults
run: |
echo "registry=${{ github.event.inputs.registry || 'aws_ecr' }}" >> $GITHUB_OUTPUT
echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT
- name: Checkout
uses: actions/checkout@v4.1.1
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
# ------------------------
# Login Steps (conditional)
# ------------------------
- name: Login to Docker Hub
if: steps.defaults.outputs.registry == 'docker_hub'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Configure AWS Credentials
if: steps.defaults.outputs.registry == 'aws_ecr'
uses: aws-actions/configure-aws-credentials@v3
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_REGION }}
- name: Login to Amazon ECR
if: steps.defaults.outputs.registry == 'aws_ecr'
uses: aws-actions/amazon-ecr-login@v1
# -------------------------
# Build and push (conditional tags)
# -------------------------
- name: Build and push
uses: docker/build-push-action@v5.3.0
with:
context: .
file: ./docker/Dockerfile
file: Dockerfile
build-args: |
NODE_VERSION=${{github.event.inputs.node_version}}
NODE_VERSION=${{ steps.defaults.outputs.node_version }}
platforms: linux/amd64,linux/arm64
push: true
tags: flowiseai/flowise:${{github.event.inputs.tag_version}}
tags: |
${{ steps.defaults.outputs.registry == 'docker_hub' && format('flowiseai/flowise:{0}', steps.defaults.outputs.tag_version) || format('{0}.dkr.ecr.{1}.amazonaws.com/flowise:{2}', secrets.AWS_ACCOUNT_ID, secrets.AWS_REGION, steps.defaults.outputs.tag_version) }}

View File

@ -6,6 +6,7 @@ on:
pull_request:
branches:
- '*'
workflow_dispatch:
permissions:
contents: read
jobs:
@ -31,6 +32,8 @@ jobs:
- run: pnpm install
- run: pnpm lint
- run: pnpm build
env:
NODE_OPTIONS: '--max_old_space_size=4096'
- name: Cypress install
run: pnpm cypress install
- name: Install dependencies (Cypress Action)

View File

@ -8,13 +8,12 @@ on:
pull_request:
branches:
- '*'
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
env:
PUPPETEER_SKIP_DOWNLOAD: true
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: docker build --no-cache -t flowise .

View File

@ -125,15 +125,11 @@ Flowise support different environment variables to configure your instance. You
| PORT | The HTTP port Flowise runs on | Number | 3000 |
| CORS_ORIGINS | The allowed origins for all cross-origin HTTP calls | String | |
| IFRAME_ORIGINS | The allowed origins for iframe src embedding | String | |
| FLOWISE_USERNAME | Username to login | String | |
| FLOWISE_PASSWORD | Password to login | String | |
| FLOWISE_FILE_SIZE_LIMIT | Upload File Size Limit | String | 50mb |
| DEBUG | Print logs from components | Boolean | |
| LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` |
| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` |
| LOG_JSON_SPACES | Spaces to beautify JSON logs | | 2 |
| APIKEY_STORAGE_TYPE | To store api keys on a JSON file or database. Default is `json` | Enum String: `json`, `db` | `json` |
| APIKEY_PATH | Location where api keys are saved when `APIKEY_STORAGE_TYPE` is `json` | String | `your-path/Flowise/packages/server` |
| TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Tool Function | String | |
| TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Tool Function | String | |
| DATABASE_TYPE | Type of database to store the flowise data | Enum String: `sqlite`, `mysql`, `postgres` | `sqlite` |

View File

@ -1,6 +1,14 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
Copyright (c) 2023-present FlowiseAI, Inc.
Portions of this software are licensed as follows:
- All content that resides under https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise directory and files with explicit copyright notice such as [IdentityManager.ts](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/IdentityManager.ts) are licensed under [Commercial License](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise/LICENSE.md).
- All third party components incorporated into the FlowiseAI Software are licensed under the original license provided by the owner of the applicable component.
- Content outside of the above mentioned directories or restrictions above is available under the "Apache 2.0" license as defined below.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

View File

@ -31,12 +31,6 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0
npx flowise start
```
With username & password
```bash
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
```
3. Open [http://localhost:3000](http://localhost:3000)
## 🐳 Docker
@ -138,15 +132,6 @@ Flowise has 3 different modules in a single mono repository.
Any code changes will reload the app automatically on [http://localhost:8080](http://localhost:8080)
## 🔒 Authentication
To enable app level authentication, add `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `.env` file in `packages/server`:
```
FLOWISE_USERNAME=user
FLOWISE_PASSWORD=1234
```
## 🌱 Env Variables
Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)

View File

@ -1,16 +1,12 @@
PORT=3000
# APIKEY_PATH=/your_apikey_path/.flowise # (will be deprecated by end of 2025)
############################################################################################################
############################################## DATABASE ####################################################
############################################################################################################
DATABASE_PATH=/root/.flowise
APIKEY_PATH=/root/.flowise
SECRETKEY_PATH=/root/.flowise
LOG_PATH=/root/.flowise/logs
BLOB_STORAGE_PATH=/root/.flowise/storage
# APIKEY_STORAGE_TYPE=json (json | db)
# NUMBER_OF_PROXIES= 1
# CORS_ORIGINS=*
# IFRAME_ORIGINS=*
# DATABASE_TYPE=postgres
# DATABASE_PORT=5432
# DATABASE_HOST=""
@ -20,34 +16,37 @@ BLOB_STORAGE_PATH=/root/.flowise/storage
# DATABASE_SSL=true
# DATABASE_SSL_KEY_BASE64=<Self signed certificate in BASE64>
############################################################################################################
############################################## SECRET KEYS #################################################
############################################################################################################
# SECRETKEY_STORAGE_TYPE=local #(local | aws)
# SECRETKEY_PATH=/your_api_key_path/.flowise
# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey
SECRETKEY_PATH=/root/.flowise
# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey # (if you want to overwrite the secret key)
# SECRETKEY_AWS_ACCESS_KEY=<your-access-key>
# SECRETKEY_AWS_SECRET_KEY=<your-secret-key>
# SECRETKEY_AWS_REGION=us-west-2
# SECRETKEY_AWS_NAME=FlowiseEncryptionKey
# FLOWISE_USERNAME=user
# FLOWISE_PASSWORD=1234
# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey
# FLOWISE_FILE_SIZE_LIMIT=50mb
############################################################################################################
############################################## LOGGING #####################################################
############################################################################################################
# DEBUG=true
# LOG_LEVEL=info (error | warn | info | verbose | debug)
LOG_PATH=/root/.flowise/logs
# LOG_LEVEL=info #(error | warn | info | verbose | debug)
# TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
# TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
# LANGCHAIN_TRACING_V2=true
# LANGCHAIN_ENDPOINT=https://api.smith.langchain.com
# LANGCHAIN_API_KEY=your_api_key
# LANGCHAIN_PROJECT=your_project
# Uncomment the following line to enable model list config, load the list of models from your local config file
# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format
# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path
############################################################################################################
############################################## STORAGE #####################################################
############################################################################################################
# STORAGE_TYPE=local (local | s3 | gcs)
# BLOB_STORAGE_PATH=/your_storage_path/.flowise/storage
BLOB_STORAGE_PATH=/root/.flowise/storage
# S3_STORAGE_BUCKET_NAME=flowise
# S3_STORAGE_ACCESS_KEY_ID=<your-access-key>
# S3_STORAGE_SECRET_ACCESS_KEY=<your-secret-key>
@ -59,12 +58,69 @@ BLOB_STORAGE_PATH=/root/.flowise/storage
# GOOGLE_CLOUD_STORAGE_BUCKET_NAME=<the-bucket-name>
# GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=true
# SHOW_COMMUNITY_NODES=true
# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable)
######################
# METRICS COLLECTION
#######################
############################################################################################################
############################################## SETTINGS ####################################################
############################################################################################################
# NUMBER_OF_PROXIES= 1
# CORS_ORIGINS=*
# IFRAME_ORIGINS=*
# FLOWISE_FILE_SIZE_LIMIT=50mb
# SHOW_COMMUNITY_NODES=true
# DISABLE_FLOWISE_TELEMETRY=true
# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable)
# Uncomment the following line to enable model list config, load the list of models from your local config file
# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format
# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path
############################################################################################################
############################################ AUTH PARAMETERS ###############################################
############################################################################################################
# APP_URL=http://localhost:3000
# SMTP_HOST=smtp.host.com
# SMTP_PORT=465
# SMTP_USER=smtp_user
# SMTP_PASSWORD=smtp_password
# SMTP_SECURE=true
# ALLOW_UNAUTHORIZED_CERTS=false
# SENDER_EMAIL=team@example.com
# JWT_AUTH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD'
# JWT_REFRESH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD'
# JWT_ISSUER='ISSUER'
# JWT_AUDIENCE='AUDIENCE'
# JWT_TOKEN_EXPIRY_IN_MINUTES=360
# JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
# EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart)
# EXPRESS_SESSION_SECRET=flowise
# INVITE_TOKEN_EXPIRY_IN_HOURS=24
# PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15
# PASSWORD_SALT_HASH_ROUNDS=10
# TOKEN_HASH_SECRET='popcorn'
# WORKSPACE_INVITE_TEMPLATE_PATH=/path/to/custom/workspace_invite.hbs
############################################################################################################
############################################# ENTERPRISE ###################################################
############################################################################################################
# LICENSE_URL=
# FLOWISE_EE_LICENSE_KEY=
# OFFLINE=
############################################################################################################
########################################### METRICS COLLECTION #############################################
############################################################################################################
# POSTHOG_PUBLIC_API_KEY=your_posthog_public_api_key
# ENABLE_METRICS=false
# METRICS_PROVIDER=prometheus # prometheus | open_telemetry
# METRICS_INCLUDE_NODE_METRICS=true # default is true
@ -75,15 +131,21 @@ BLOB_STORAGE_PATH=/root/.flowise/storage
# METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http)
# METRICS_OPEN_TELEMETRY_DEBUG=true # default is false
# Uncomment the following lines to enable global agent proxy
# see https://www.npmjs.com/package/global-agent for more details
############################################################################################################
############################################### PROXY ######################################################
############################################################################################################
# Uncomment the following lines to enable global agent proxy, see https://www.npmjs.com/package/global-agent for more details
# GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl
# GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl
# GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded
######################
# QUEUE CONFIGURATION
#######################
############################################################################################################
########################################### QUEUE CONFIGURATION ############################################
############################################################################################################
# MODE=queue #(queue | main)
# QUEUE_NAME=flowise-queue
# QUEUE_REDIS_EVENT_STREAM_MAX_LEN=100000

View File

@ -9,26 +9,11 @@ Starts Flowise from [DockerHub Image](https://hub.docker.com/r/flowiseai/flowise
3. Open [http://localhost:3000](http://localhost:3000)
4. You can bring the containers down by `docker compose stop`
## 🔒 Authentication
1. Create `.env` file and specify the `PORT`, `FLOWISE_USERNAME`, and `FLOWISE_PASSWORD` (refer to `.env.example`)
2. Pass `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `docker-compose.yml` file:
```
environment:
- PORT=${PORT}
- FLOWISE_USERNAME=${FLOWISE_USERNAME}
- FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
```
3. `docker compose up -d`
4. Open [http://localhost:3000](http://localhost:3000)
5. You can bring the containers down by `docker compose stop`
## 🌱 Env Variables
If you like to persist your data (flows, logs, apikeys, credentials), set these variables in the `.env` file inside `docker` folder:
- DATABASE_PATH=/root/.flowise
- APIKEY_PATH=/root/.flowise
- LOG_PATH=/root/.flowise/logs
- SECRETKEY_PATH=/root/.flowise
- BLOB_STORAGE_PATH=/root/.flowise/storage

View File

@ -8,8 +8,6 @@ services:
- PORT=${PORT}
- CORS_ORIGINS=${CORS_ORIGINS}
- IFRAME_ORIGINS=${IFRAME_ORIGINS}
- FLOWISE_USERNAME=${FLOWISE_USERNAME}
- FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
- FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT}
- DEBUG=${DEBUG}
- DATABASE_PATH=${DATABASE_PATH}
@ -21,10 +19,13 @@ services:
- DATABASE_PASSWORD=${DATABASE_PASSWORD}
- DATABASE_SSL=${DATABASE_SSL}
- DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64}
- APIKEY_STORAGE_TYPE=${APIKEY_STORAGE_TYPE}
- APIKEY_PATH=${APIKEY_PATH}
- SECRETKEY_PATH=${SECRETKEY_PATH}
- FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE}
- SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE}
- SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY}
- SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY}
- SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION}
- SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME}
- LOG_LEVEL=${LOG_LEVEL}
- LOG_PATH=${LOG_PATH}
- BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH}

View File

@ -8,8 +8,6 @@ services:
- PORT=${PORT}
- CORS_ORIGINS=${CORS_ORIGINS}
- IFRAME_ORIGINS=${IFRAME_ORIGINS}
- FLOWISE_USERNAME=${FLOWISE_USERNAME}
- FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
- FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT}
- DEBUG=${DEBUG}
- DATABASE_PATH=${DATABASE_PATH}
@ -21,8 +19,6 @@ services:
- DATABASE_PASSWORD=${DATABASE_PASSWORD}
- DATABASE_SSL=${DATABASE_SSL}
- DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64}
- APIKEY_STORAGE_TYPE=${APIKEY_STORAGE_TYPE}
- APIKEY_PATH=${APIKEY_PATH}
- SECRETKEY_PATH=${SECRETKEY_PATH}
- FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE}
- LOG_LEVEL=${LOG_LEVEL}

View File

@ -119,16 +119,12 @@ Flowise 在一个单一的单体存储库中有 3 个不同的模块。
Flowise 支持不同的环境变量来配置您的实例。您可以在 `packages/server` 文件夹中的 `.env` 文件中指定以下变量。阅读[更多信息](https://docs.flowiseai.com/environment-variables)
| 变量名 | 描述 | 类型 | 默认值 |
| ---------------------------- | ------------------------------------------------------- | ----------------------------------------------- | ----------------------------------- | --- |
| PORT | Flowise 运行的 HTTP 端口 | 数字 | 3000 |
| FLOWISE_USERNAME | 登录用户名 | 字符串 | |
| FLOWISE_PASSWORD | 登录密码 | 字符串 | |
| ---------------------------- | ------------------------------------------------------- | ----------------------------------------------- | ----------------------------------- | --- | --- |
| PORT | Flowise 运行的 HTTP 端口 | 数字 | 3000 | | |
| FLOWISE_FILE_SIZE_LIMIT | 上传文件大小限制 | 字符串 | 50mb | |
| DEBUG | 打印组件的日志 | 布尔值 | |
| LOG_PATH | 存储日志文件的位置 | 字符串 | `your-path/Flowise/logs` |
| LOG_LEVEL | 日志的不同级别 | 枚举字符串: `error`, `info`, `verbose`, `debug` | `info` |
| APIKEY_STORAGE_TYPE | 存储 API 密钥的存储类型 | 枚举字符串: `json`, `db` | `json` |
| APIKEY_PATH | 存储 API 密钥的位置, 当`APIKEY_STORAGE_TYPE`是`json` | 字符串 | `your-path/Flowise/packages/server` |
| TOOL_FUNCTION_BUILTIN_DEP | 用于工具函数的 NodeJS 内置模块 | 字符串 | |
| TOOL_FUNCTION_EXTERNAL_DEP | 用于工具函数的外部模块 | 字符串 | |
| DATABASE_TYPE | 存储 flowise 数据的数据库类型 | 枚举字符串: `sqlite`, `mysql`, `postgres` | `sqlite` |

View File

@ -31,12 +31,6 @@
npx flowise start
```
ユーザー名とパスワードを入力
```bash
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
```
3. [http://localhost:3000](http://localhost:3000) を開く
## 🐳 Docker
@ -127,15 +121,6 @@ Flowise には、3 つの異なるモジュールが 1 つの mono リポジト
コードの変更は [http://localhost:8080](http://localhost:8080) に自動的にアプリをリロードします
## 🔒 認証
アプリレベルの認証を有効にするには、 `FLOWISE_USERNAME``FLOWISE_PASSWORD``packages/server``.env` ファイルに追加します:
```
FLOWISE_USERNAME=user
FLOWISE_PASSWORD=1234
```
## 🌱 環境変数
Flowise は、インスタンスを設定するためのさまざまな環境変数をサポートしています。`packages/server` フォルダ内の `.env` ファイルで以下の変数を指定することができる。[続き](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)を読む

View File

@ -31,12 +31,6 @@
npx flowise start
```
사용자 이름과 비밀번호로 시작하기
```bash
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
```
3. [http://localhost:3000](http://localhost:3000) URL 열기
## 🐳 도커(Docker)를 활용하여 시작하기
@ -127,15 +121,6 @@ Flowise는 단일 리포지토리에 3개의 서로 다른 모듈이 있습니
코드가 변경되면 [http://localhost:8080](http://localhost:8080)에서 자동으로 애플리케이션을 새로고침 합니다.
## 🔒 인증
애플리케이션 수준의 인증을 사용하려면 `packages/server``.env` 파일에 `FLOWISE_USERNAME``FLOWISE_PASSWORD`를 추가합니다:
```
FLOWISE_USERNAME=user
FLOWISE_PASSWORD=1234
```
## 🌱 환경 변수
Flowise는 인스턴스 구성을 위한 다양한 환경 변수를 지원합니다. `packages/server` 폴더 내 `.env` 파일에 다양한 환경 변수를 지정할 수 있습니다. [자세히 보기](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)

View File

@ -31,12 +31,6 @@
npx flowise start
```
使用用戶名和密碼
```bash
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
```
3. 打開 [http://localhost:3000](http://localhost:3000)
## 🐳 Docker
@ -138,15 +132,6 @@ Flowise 在單個 mono 存儲庫中有 3 個不同的模塊。
任何代碼更改都會自動重新加載應用程序 [http://localhost:8080](http://localhost:8080)
## 🔒 認證
要啟用應用級別的身份驗證,請在 `packages/server` 中的 `.env` 文件中添加 `FLOWISE_USERNAME``FLOWISE_PASSWORD`
```
FLOWISE_USERNAME=user
FLOWISE_PASSWORD=1234
```
## 🌱 環境變量
Flowise 支持不同的環境變量來配置您的實例。您可以在 `packages/server` 文件夾中的 `.env` 文件中指定以下變量。閱讀 [更多](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)

View File

@ -31,12 +31,6 @@
npx flowise start
```
使用用户名和密码
```bash
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
```
3. 打开 [http://localhost:3000](http://localhost:3000)
## 🐳 Docker
@ -127,15 +121,6 @@ Flowise 在一个单一的代码库中有 3 个不同的模块。
任何代码更改都会自动重新加载应用程序,访问 [http://localhost:8080](http://localhost:8080)
## 🔒 认证
要启用应用程序级身份验证,在 `packages/server``.env` 文件中添加 `FLOWISE_USERNAME``FLOWISE_PASSWORD`
```
FLOWISE_USERNAME=user
FLOWISE_PASSWORD=1234
```
## 🌱 环境变量
Flowise 支持不同的环境变量来配置您的实例。您可以在 `packages/server` 文件夹中的 `.env` 文件中指定以下变量。了解更多信息,请阅读[文档](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)

View File

@ -20,6 +20,7 @@
"start-worker": "run-script-os",
"start-worker:windows": "cd packages/server/bin && run worker",
"start-worker:default": "cd packages/server/bin && ./run worker",
"test": "turbo run test",
"clean": "pnpm --filter \"./packages/**\" clean",
"nuke": "pnpm --filter \"./packages/**\" nuke && rimraf node_modules .turbo",
"format": "prettier --write \"**/*.{ts,tsx,md}\"",

View File

@ -0,0 +1,165 @@
import { RunCollectorCallbackHandler } from '@langchain/core/tracers/run_collector'
import { Run } from '@langchain/core/tracers/base'
import { EvaluationRunner } from './EvaluationRunner'
import { encoding_for_model, get_encoding } from '@dqbd/tiktoken'
export class EvaluationRunTracer extends RunCollectorCallbackHandler {
evaluationRunId: string
model: string
constructor(id: string) {
super()
this.evaluationRunId = id
}
async persistRun(run: Run): Promise<void> {
return super.persistRun(run)
}
countPromptTokens = (encoding: any, run: Run): number => {
let promptTokenCount = 0
if (encoding) {
if (run.inputs?.messages?.length > 0 && run.inputs?.messages[0]?.length > 0) {
run.inputs.messages[0].map((message: any) => {
let content = message.content
? message.content
: message.SystemMessage?.content
? message.SystemMessage.content
: message.HumanMessage?.content
? message.HumanMessage.content
: message.AIMessage?.content
? message.AIMessage.content
: undefined
promptTokenCount += content ? encoding.encode(content).length : 0
})
}
if (run.inputs?.prompts?.length > 0) {
const content = run.inputs.prompts[0]
promptTokenCount += content ? encoding.encode(content).length : 0
}
}
return promptTokenCount
}
countCompletionTokens = (encoding: any, run: Run): number => {
let completionTokenCount = 0
if (encoding) {
if (run.outputs?.generations?.length > 0 && run.outputs?.generations[0]?.length > 0) {
run.outputs?.generations[0].map((chunk: any) => {
let content = chunk.text ? chunk.text : chunk.message?.content ? chunk.message?.content : undefined
completionTokenCount += content ? encoding.encode(content).length : 0
})
}
}
return completionTokenCount
}
extractModelName = (run: Run): string => {
return (
(run?.serialized as any)?.kwargs?.model ||
(run?.serialized as any)?.kwargs?.model_name ||
(run?.extra as any)?.metadata?.ls_model_name ||
(run?.extra as any)?.metadata?.fw_model_name
)
}
onLLMEnd?(run: Run): void | Promise<void> {
if (run.name) {
let provider = run.name
if (provider === 'BedrockChat') {
provider = 'awsChatBedrock'
}
EvaluationRunner.addMetrics(
this.evaluationRunId,
JSON.stringify({
provider: provider
})
)
}
let model = this.extractModelName(run)
if (run.outputs?.llmOutput?.tokenUsage) {
const tokenUsage = run.outputs?.llmOutput?.tokenUsage
if (tokenUsage) {
const metric = {
completionTokens: tokenUsage.completionTokens,
promptTokens: tokenUsage.promptTokens,
model: model,
totalTokens: tokenUsage.totalTokens
}
EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric))
}
} else if (
run.outputs?.generations?.length > 0 &&
run.outputs?.generations[0].length > 0 &&
run.outputs?.generations[0][0]?.message?.usage_metadata?.total_tokens
) {
const usage_metadata = run.outputs?.generations[0][0]?.message?.usage_metadata
if (usage_metadata) {
const metric = {
completionTokens: usage_metadata.output_tokens,
promptTokens: usage_metadata.input_tokens,
model: model || this.model,
totalTokens: usage_metadata.total_tokens
}
EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric))
}
} else {
let encoding: any = undefined
let promptInputTokens = 0
let completionTokenCount = 0
try {
encoding = encoding_for_model(model as any)
promptInputTokens = this.countPromptTokens(encoding, run)
completionTokenCount = this.countCompletionTokens(encoding, run)
} catch (e) {
try {
// as tiktoken will fail for non openai models, assume that is 'cl100k_base'
encoding = get_encoding('cl100k_base')
promptInputTokens = this.countPromptTokens(encoding, run)
completionTokenCount = this.countCompletionTokens(encoding, run)
} catch (e) {
// stay silent
}
}
const metric = {
completionTokens: completionTokenCount,
promptTokens: promptInputTokens,
model: model,
totalTokens: promptInputTokens + completionTokenCount
}
EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric))
//cleanup
this.model = ''
}
}
async onRunUpdate(run: Run): Promise<void> {
const json = {
[run.run_type]: elapsed(run)
}
let metric = JSON.stringify(json)
if (metric) {
EvaluationRunner.addMetrics(this.evaluationRunId, metric)
}
if (run.run_type === 'llm') {
let model = this.extractModelName(run)
if (model) {
EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify({ model: model }))
this.model = model
}
// OpenAI non streaming models
const estimatedTokenUsage = run.outputs?.llmOutput?.estimatedTokenUsage
if (estimatedTokenUsage && typeof estimatedTokenUsage === 'object' && Object.keys(estimatedTokenUsage).length > 0) {
EvaluationRunner.addMetrics(this.evaluationRunId, estimatedTokenUsage)
}
}
}
}
function elapsed(run: Run) {
if (!run.end_time) return ''
const elapsed = run.end_time - run.start_time
return `${elapsed.toFixed(2)}`
}

View File

@ -0,0 +1,186 @@
import { ChatMessage, LLMEndEvent, LLMStartEvent, LLMStreamEvent, MessageContentTextDetail, RetrievalEndEvent, Settings } from 'llamaindex'
import { EvaluationRunner } from './EvaluationRunner'
import { additionalCallbacks, ICommonObject, INodeData } from '../src'
import { RetrievalStartEvent } from 'llamaindex/dist/type/llm/types'
import { AgentEndEvent, AgentStartEvent } from 'llamaindex/dist/type/agent/types'
import { encoding_for_model } from '@dqbd/tiktoken'
import { MessageContent } from '@langchain/core/messages'
export class EvaluationRunTracerLlama {
evaluationRunId: string
static cbInit = false
static startTimes = new Map<string, number>()
static models = new Map<string, string>()
static tokenCounts = new Map<string, number>()
constructor(id: string) {
this.evaluationRunId = id
EvaluationRunTracerLlama.constructCallBacks()
}
// Registers the process-wide LlamaIndex callback listeners exactly once.
// Each listener resolves the evaluation run id stamped on the caller by
// injectEvaluationMetadata() and records timing/token metrics for that run.
static constructCallBacks = () => {
    if (!EvaluationRunTracerLlama.cbInit) {
        // Resolve the run id from the event's caller chain (parent first).
        // Fully optional-chained: an event without a `reason` must not throw.
        const getEvalID = (event: any): string | undefined =>
            event?.reason?.parent?.caller?.evaluationRunId || event?.reason?.caller?.evaluationRunId
        Settings.callbackManager.on('llm-start', (event: LLMStartEvent) => {
            const evalID = getEvalID(event)
            if (!evalID) return
            const model = (event as any).reason?.caller?.model
            if (model) {
                EvaluationRunTracerLlama.models.set(evalID, model)
                try {
                    const encoding = encoding_for_model(model)
                    if (encoding) {
                        const { messages } = event.detail.payload
                        const tokenCount = messages.reduce((count: number, message: ChatMessage) => {
                            return count + encoding.encode(extractText(message.content)).length
                        }, 0)
                        EvaluationRunTracerLlama.tokenCounts.set(evalID + '_promptTokens', tokenCount)
                        EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', 0)
                    }
                } catch (e) {
                    // tiktoken throws for unknown model names; token metrics are best-effort
                }
            }
            EvaluationRunTracerLlama.startTimes.set(evalID + '_llm', event.timeStamp)
        })
        Settings.callbackManager.on('llm-end', (event: LLMEndEvent) => {
            this.calculateAndSetMetrics(event, 'llm')
        })
        Settings.callbackManager.on('llm-stream', (event: LLMStreamEvent) => {
            const evalID = getEvalID(event)
            if (!evalID) return
            const { chunk } = event.detail.payload
            const { delta } = chunk
            const model = (event as any).reason?.caller?.model
            try {
                const encoding = encoding_for_model(model)
                if (encoding) {
                    let tokenCount = EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0
                    tokenCount += encoding.encode(extractText(delta)).length
                    EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', tokenCount)
                }
            } catch (e) {
                // tiktoken throws for unknown model names; token metrics are best-effort
            }
        })
        Settings.callbackManager.on('retrieve-start', (event: RetrievalStartEvent) => {
            const evalID = getEvalID(event)
            if (evalID) {
                EvaluationRunTracerLlama.startTimes.set(evalID + '_retriever', event.timeStamp)
            }
        })
        Settings.callbackManager.on('retrieve-end', (event: RetrievalEndEvent) => {
            this.calculateAndSetMetrics(event, 'retriever')
        })
        Settings.callbackManager.on('agent-start', (event: AgentStartEvent) => {
            const evalID = getEvalID(event)
            if (evalID) {
                EvaluationRunTracerLlama.startTimes.set(evalID + '_agent', event.timeStamp)
            }
        })
        Settings.callbackManager.on('agent-end', (event: AgentEndEvent) => {
            this.calculateAndSetMetrics(event, 'agent')
        })
        EvaluationRunTracerLlama.cbInit = true
    }
}
/**
 * Computes latency and token metrics for a finished llm/retriever/agent span
 * and forwards them to EvaluationRunner, then clears this run's bookkeeping.
 * @param event a LlamaIndex `*-end` event
 * @param label span label used for the start-time lookup: 'llm' | 'retriever' | 'agent'
 */
private static calculateAndSetMetrics(event: any, label: string) {
    // Optional-chained throughout: events without a `reason` must not throw.
    const evalID = event?.reason?.parent?.caller?.evaluationRunId || event?.reason?.caller?.evaluationRunId
    if (!evalID) return
    const startTime = EvaluationRunTracerLlama.startTimes.get(evalID + '_' + label) as number
    let model =
        (event as any).reason?.caller?.model || (event as any).reason?.caller?.llm?.model || EvaluationRunTracerLlama.models.get(evalID)
    // Add the final message's tokens to the running completion-token count.
    if (event.detail.payload?.response?.message && model) {
        try {
            const encoding = encoding_for_model(model)
            if (encoding) {
                let tokenCount = EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0
                tokenCount += encoding.encode(event.detail.payload.response?.message?.content || '').length
                EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', tokenCount)
            }
        } catch (e) {
            // tiktoken throws for unknown model names; token metrics are best-effort
        }
    }
    const usage = event.detail?.payload?.response?.raw?.usage
    // guard with ?.[] — `raw` may be undefined when the provider returns no raw payload
    const bedrockUsage = event.detail?.payload?.response?.raw?.['amazon-bedrock-invocationMetrics']
    if (usage) {
        // Anthropic reports output_tokens/input_tokens; OpenAI-compatible providers
        // report completion_tokens/prompt_tokens/total_tokens.
        if (usage.output_tokens) {
            const metric = {
                completionTokens: usage.output_tokens,
                promptTokens: usage.input_tokens,
                model: model,
                totalTokens: usage.input_tokens + usage.output_tokens
            }
            EvaluationRunner.addMetrics(evalID, JSON.stringify(metric))
        } else if (usage.completion_tokens) {
            const metric = {
                completionTokens: usage.completion_tokens,
                promptTokens: usage.prompt_tokens,
                model: model,
                totalTokens: usage.total_tokens
            }
            EvaluationRunner.addMetrics(evalID, JSON.stringify(metric))
        }
    } else if (bedrockUsage) {
        const metric = {
            completionTokens: bedrockUsage.outputTokenCount,
            promptTokens: bedrockUsage.inputTokenCount,
            model: event.detail?.payload?.response?.raw.model,
            totalTokens: bedrockUsage.inputTokenCount + bedrockUsage.outputTokenCount
        }
        EvaluationRunner.addMetrics(evalID, JSON.stringify(metric))
    } else {
        // No provider-reported usage: fall back to locally counted tokens and the measured span latency.
        const metric = {
            [label]: (event.timeStamp - startTime).toFixed(2),
            completionTokens: EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens'),
            promptTokens: EvaluationRunTracerLlama.tokenCounts.get(evalID + '_promptTokens'),
            model: model || EvaluationRunTracerLlama.models.get(evalID) || '',
            totalTokens:
                (EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0) +
                (EvaluationRunTracerLlama.tokenCounts.get(evalID + '_promptTokens') || 0)
        }
        EvaluationRunner.addMetrics(evalID, JSON.stringify(metric))
    }
    // cleanup
    EvaluationRunTracerLlama.startTimes.delete(evalID + '_' + label)
    // NOTE(review): the original also deleted '<id>_outputTokens'/'<id>_promptTokens'
    // from startTimes, but those keys only ever exist in tokenCounts, so those deletes
    // were no-ops and are dropped here. The tokenCounts entries are deliberately left
    // in place because a later 'agent-end' for the same run still reads them — but they
    // are never purged at all; confirm whether they should be cleared on 'agent-end'.
    EvaluationRunTracerLlama.models.delete(evalID)
}
/**
 * When running inside an evaluation, wires the evaluation callbacks into the
 * node options and stamps the caller object with the evaluation run id so the
 * shared event listeners can attribute LlamaIndex events to this run.
 */
static async injectEvaluationMetadata(nodeData: INodeData, options: ICommonObject, callerObj: any) {
    if (!options.evaluationRunId || !callerObj) return
    // these are needed for evaluation runs
    options.llamaIndex = true
    await additionalCallbacks(nodeData, options)
    // define the run id as an enumerable, writable own property on the caller
    Object.defineProperty(callerObj, 'evaluationRunId', {
        enumerable: true,
        configurable: true,
        writable: true,
        value: options.evaluationRunId
    })
}
}
// from https://github.com/run-llama/LlamaIndexTS/blob/main/packages/core/src/llm/utils.ts
export function extractText(message: MessageContent): string {
    // Plain string content is returned untouched.
    if (typeof message === 'string') {
        return message
    }
    if (Array.isArray(message)) {
        // MessageContentDetail[]: keep only the text parts and join them so the
        // result can be passed to the context generator.
        const textParts: string[] = []
        for (const part of message) {
            if ((part as MessageContentTextDetail).type === 'text') {
                textParts.push((part as MessageContentTextDetail).text)
            }
        }
        return textParts.join('\n\n')
    }
    // Anything else is unexpected — warn and stringify as a last resort.
    console.warn('extractText called with non-MessageContent message, this is likely a bug.')
    return `${message}`
}

View File

@ -0,0 +1,172 @@
import axios from 'axios'
import { v4 as uuidv4 } from 'uuid'
import { ICommonObject } from '../src'
import { getModelConfigByModelName, MODEL_TYPE } from '../src/modelLoader'
export class EvaluationRunner {
    // metrics collected per evaluation run id; entries are JSON strings
    static metrics = new Map<string, string[]>()

    /**
     * Returns and removes the metrics collected for an evaluation run.
     * Before returning, tries to resolve the cost configuration for the
     * model/provider seen in the metrics and appends it as a `cost_values` entry.
     */
    static async getAndDeleteMetrics(id: string) {
        const val = EvaluationRunner.metrics.get(id)
        if (val) {
            try {
                //first lets get the provider and model
                let selectedModel = undefined
                let selectedProvider = undefined
                if (val && val.length > 0) {
                    for (const metric of val) {
                        // entries are usually JSON strings, but tolerate plain objects;
                        // parse each entry once (the original parsed twice per iteration)
                        const parsed: any = typeof metric === 'object' ? metric : JSON.parse(metric)
                        if (parsed['model']) {
                            selectedModel = parsed['model']
                        }
                        if (parsed['provider']) {
                            selectedProvider = parsed['provider']
                        }
                    }
                }
                // look up the cost configuration, preferring chat models over completion LLMs
                let modelConfig = await getModelConfigByModelName(MODEL_TYPE.CHAT, selectedProvider, selectedModel)
                if (modelConfig) {
                    val.push(JSON.stringify({ cost_values: modelConfig }))
                } else {
                    modelConfig = await getModelConfigByModelName(MODEL_TYPE.LLM, selectedProvider, selectedModel)
                    if (modelConfig) {
                        val.push(JSON.stringify({ cost_values: modelConfig }))
                    }
                }
            } catch (error) {
                //stay silent
            }
        }
        EvaluationRunner.metrics.delete(id)
        return val
    }

    /** Appends a metric (JSON string) to the list kept for an evaluation run id. */
    static addMetrics(id: string, metric: string) {
        if (EvaluationRunner.metrics.has(id)) {
            EvaluationRunner.metrics.get(id)?.push(metric)
        } else {
            EvaluationRunner.metrics.set(id, [metric])
        }
    }

    // base URL of the Flowise server that prediction requests are sent to
    baseURL = ''

    constructor(baseURL: string) {
        this.baseURL = baseURL
    }

    /** Returns the API key registered for a chatflow, or '' when none is configured. */
    getChatflowApiKey(chatflowId: string, apiKeys: { chatflowId: string; apiKey: string }[] = []) {
        return apiKeys.find((item) => item.chatflowId === chatflowId)?.apiKey || ''
    }

    /**
     * Runs every dataset row against every chatflow listed in `data.chatflowId`
     * (a JSON array string) and returns the collected evaluation rows.
     */
    public async runEvaluations(data: ICommonObject) {
        const chatflowIds = JSON.parse(data.chatflowId)
        const returnData: ICommonObject = {}
        returnData.evaluationId = data.evaluationId
        returnData.runDate = new Date()
        returnData.rows = []
        for (let i = 0; i < data.dataset.rows.length; i++) {
            returnData.rows.push({
                input: data.dataset.rows[i].input,
                expectedOutput: data.dataset.rows[i].output,
                itemNo: data.dataset.rows[i].sequenceNo,
                evaluations: [],
                status: 'pending'
            })
        }
        for (let i = 0; i < chatflowIds.length; i++) {
            const chatflowId = chatflowIds[i]
            await this.evaluateChatflow(chatflowId, this.getChatflowApiKey(chatflowId, data.apiKeys), data, returnData)
        }
        return returnData
    }

    /**
     * Sends each dataset row as a prediction request to one chatflow and appends
     * the per-row run result (output, latency, metrics or error) to `returnData`.
     */
    async evaluateChatflow(chatflowId: string, apiKey: string, data: any, returnData: any) {
        for (let i = 0; i < data.dataset.rows.length; i++) {
            const item = data.dataset.rows[i]
            const uuid = uuidv4()
            const headers: any = {
                'X-Request-ID': uuid,
                'X-Flowise-Evaluation': 'true'
            }
            if (apiKey) {
                headers['Authorization'] = `Bearer ${apiKey}`
            }
            const axiosConfig = {
                headers: headers
            }
            const startTime = performance.now()
            const runData: any = {}
            runData.chatflowId = chatflowId
            runData.startTime = startTime
            const postData: any = { question: item.input, evaluationRunId: uuid, evaluation: true }
            if (data.sessionId) {
                postData.overrideConfig = { sessionId: data.sessionId }
            }
            try {
                const response = await axios.post(`${this.baseURL}/api/v1/prediction/${chatflowId}`, postData, axiosConfig)
                const endTime = performance.now()
                const timeTaken = (endTime - startTime).toFixed(2)
                // keep server-side metrics (tokens, cost, ...) and add measured API latency
                if (response?.data?.metrics) {
                    runData.metrics = response.data.metrics
                    runData.metrics.push({
                        apiLatency: timeTaken
                    })
                } else {
                    runData.metrics = [
                        {
                            apiLatency: timeTaken
                        }
                    ]
                }
                runData.status = 'complete'
                let resultText = ''
                if (response.data.text) resultText = response.data.text
                else if (response.data.json) resultText = '```json\n' + JSON.stringify(response.data.json, null, 2)
                else resultText = JSON.stringify(response.data, null, 2)
                runData.actualOutput = resultText
                runData.latency = timeTaken
                runData.error = ''
            } catch (error: any) {
                runData.status = 'error'
                runData.actualOutput = ''
                runData.error = error?.response?.data?.message
                    ? error.response.data.message
                    : error?.message
                    ? error.message
                    : 'Unknown error'
                try {
                    if (runData.error.indexOf('-') > -1) {
                        // if there is a dash, remove all content before
                        runData.error = 'Error: ' + runData.error.slice(runData.error.indexOf('-') + 1).trim()
                    }
                } catch (error) {
                    //stay silent
                }
                const endTime = performance.now()
                const timeTaken = (endTime - startTime).toFixed(2)
                runData.metrics = [
                    {
                        apiLatency: timeTaken
                    }
                ]
                runData.latency = timeTaken
            }
            runData.uuid = uuid
            returnData.rows[i].evaluations.push(runData)
        }
        return returnData
    }
}

View File

@ -427,7 +427,8 @@ class Agent_Agentflow implements INode {
return returnData
}
const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find()
const searchOptions = options.searchOptions || {}
const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions)
for (const store of stores) {
if (store.status === 'UPSERTED') {
const obj = {

View File

@ -152,7 +152,7 @@ class CustomFunction_Agentflow implements INode {
newState = updateFlowState(state, _customFunctionUpdateState)
}
const variables = await getVars(appDataSource, databaseEntities, nodeData)
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
const flow = {
chatflowId: options.chatflowid,
sessionId: options.sessionId,

View File

@ -127,7 +127,8 @@ class ExecuteFlow_Agentflow implements INode {
return returnData
}
const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).find()
const searchOptions = options.searchOptions || {}
const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions)
for (let i = 0; i < chatflows.length; i += 1) {
let cfType = 'Chatflow'

View File

@ -119,7 +119,8 @@ class Retriever_Agentflow implements INode {
return returnData
}
const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find()
const searchOptions = options.searchOptions || {}
const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions)
for (const store of stores) {
if (store.status === 'UPSERTED') {
const obj = {

View File

@ -18,7 +18,7 @@ export const addImagesToMessages = async (
for (const upload of imageUploads) {
let bf = upload.data
if (upload.type == 'stored-file') {
const contents = await getFileFromStorage(upload.name, options.chatflowid, options.chatId)
const contents = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId)
// as the image is stored in the server, read the file and convert it to base64
bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64')
@ -90,7 +90,7 @@ export const processMessagesWithImages = async (
hasImageReferences = true
try {
// Get file contents from storage
const contents = await getFileFromStorage(item.name, options.chatflowid, options.chatId)
const contents = await getFileFromStorage(item.name, options.orgId, options.chatflowid, options.chatId)
// Create base64 data URL
const base64Data = 'data:' + item.mime + ';base64,' + contents.toString('base64')
@ -319,7 +319,7 @@ export const getPastChatHistoryImageMessages = async (
const imageContents: MessageContentImageUrl[] = []
for (const upload of uploads) {
if (upload.type === 'stored-file' && upload.mime.startsWith('image/')) {
const fileData = await getFileFromStorage(upload.name, options.chatflowid, options.chatId)
const fileData = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId)
// as the image is stored in the server, read the file and convert it to base64
const bf = 'data:' + upload.mime + ';base64,' + fileData.toString('base64')

View File

@ -128,7 +128,7 @@ class Airtable_Agents implements INode {
let base64String = Buffer.from(JSON.stringify(airtableData)).toString('base64')
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
const pyodide = await LoadPyodide()
@ -163,7 +163,7 @@ json.dumps(my_dict)`
const chain = new LLMChain({
llm: model,
prompt: PromptTemplate.fromTemplate(systemPrompt),
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
const inputs = {
dict: dataframeColDict,
@ -192,7 +192,7 @@ json.dumps(my_dict)`
const chain = new LLMChain({
llm: model,
prompt: PromptTemplate.fromTemplate(finalSystemPrompt),
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
const inputs = {
question: input,

View File

@ -97,7 +97,7 @@ class CSV_Agents implements INode {
}
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const shouldStreamResponse = options.shouldStreamResponse
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
const chatId = options.chatId
@ -114,11 +114,12 @@ class CSV_Agents implements INode {
} else {
files = [fileName]
}
const orgId = options.orgId
const chatflowid = options.chatflowid
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
base64String += fileData.toString('base64')
}
} else {
@ -170,7 +171,7 @@ json.dumps(my_dict)`
const chain = new LLMChain({
llm: model,
prompt: PromptTemplate.fromTemplate(systemPrompt),
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
const inputs = {
dict: dataframeColDict,
@ -201,7 +202,7 @@ json.dumps(my_dict)`
prompt: PromptTemplate.fromTemplate(
systemMessagePrompt ? `${systemMessagePrompt}\n${finalSystemPrompt}` : finalSystemPrompt
),
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
const inputs = {
question: input,

View File

@ -132,7 +132,7 @@ class ConversationalAgent_Agents implements INode {
}
const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
let res: ChainValues = {}

View File

@ -130,7 +130,7 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
let res: ChainValues = {}
@ -288,7 +288,7 @@ const prepareAgent = async (
sessionId: flowObj?.sessionId,
chatId: flowObj?.chatId,
input: flowObj?.input,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
})

View File

@ -2,6 +2,7 @@ import { flatten } from 'lodash'
import { MessageContentTextDetail, ChatMessage, AnthropicAgent, Anthropic } from 'llamaindex'
import { getBaseClasses } from '../../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../../src/Interface'
import { EvaluationRunTracerLlama } from '../../../../evaluation/EvaluationRunTracerLlama'
class AnthropicAgent_LlamaIndex_Agents implements INode {
label: string
@ -96,13 +97,16 @@ class AnthropicAgent_LlamaIndex_Agents implements INode {
tools,
llm: model,
chatHistory: chatHistory,
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
// these are needed for evaluation runs
await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, agent)
let text = ''
const usedTools: IUsedTool[] = []
const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' })
const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false })
if (response.sources.length) {
for (const sourceTool of response.sources) {

View File

@ -1,6 +1,7 @@
import { flatten } from 'lodash'
import { ChatMessage, OpenAI, OpenAIAgent } from 'llamaindex'
import { getBaseClasses } from '../../../../src/utils'
import { EvaluationRunTracerLlama } from '../../../../evaluation/EvaluationRunTracerLlama'
import {
FlowiseMemory,
ICommonObject,
@ -107,9 +108,12 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
tools,
llm: model,
chatHistory: chatHistory,
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
// these are needed for evaluation runs
await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, agent)
let text = ''
let isStreamingStarted = false
const usedTools: IUsedTool[] = []
@ -119,10 +123,9 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
message: input,
chatHistory,
stream: true,
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
for await (const chunk of stream) {
//console.log('chunk', chunk)
text += chunk.response.delta
if (!isStreamingStarted) {
isStreamingStarted = true
@ -147,7 +150,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
}
}
} else {
const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' })
const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false })
if (response.sources.length) {
for (const sourceTool of response.sources) {
usedTools.push({

View File

@ -107,7 +107,11 @@ class OpenAIAssistant_Agents implements INode {
return returnData
}
const assistants = await appDataSource.getRepository(databaseEntities['Assistant']).find()
const searchOptions = options.searchOptions || {}
const assistants = await appDataSource.getRepository(databaseEntities['Assistant']).findBy({
...searchOptions,
type: 'OPENAI'
})
for (let i = 0; i < assistants.length; i += 1) {
const assistantDetails = JSON.parse(assistants[i].details)
@ -130,13 +134,14 @@ class OpenAIAssistant_Agents implements INode {
const selectedAssistantId = nodeData.inputs?.selectedAssistant as string
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const orgId = options.orgId
const assistant = await appDataSource.getRepository(databaseEntities['Assistant']).findOneBy({
id: selectedAssistantId
})
if (!assistant) {
options.logger.error(`Assistant ${selectedAssistantId} not found`)
options.logger.error(`[${orgId}]: Assistant ${selectedAssistantId} not found`)
return
}
@ -149,7 +154,7 @@ class OpenAIAssistant_Agents implements INode {
chatId
})
if (!chatmsg) {
options.logger.error(`Chat Message with Chat Id: ${chatId} not found`)
options.logger.error(`[${orgId}]: Chat Message with Chat Id: ${chatId} not found`)
return
}
sessionId = chatmsg.sessionId
@ -160,21 +165,21 @@ class OpenAIAssistant_Agents implements INode {
const credentialData = await getCredentialData(assistant.credential ?? '', options)
const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData)
if (!openAIApiKey) {
options.logger.error(`OpenAI ApiKey not found`)
options.logger.error(`[${orgId}]: OpenAI ApiKey not found`)
return
}
const openai = new OpenAI({ apiKey: openAIApiKey })
options.logger.info(`Clearing OpenAI Thread ${sessionId}`)
options.logger.info(`[${orgId}]: Clearing OpenAI Thread ${sessionId}`)
try {
if (sessionId && sessionId.startsWith('thread_')) {
await openai.beta.threads.del(sessionId)
options.logger.info(`Successfully cleared OpenAI Thread ${sessionId}`)
options.logger.info(`[${orgId}]: Successfully cleared OpenAI Thread ${sessionId}`)
} else {
options.logger.error(`Error clearing OpenAI Thread ${sessionId}`)
options.logger.error(`[${orgId}]: Error clearing OpenAI Thread ${sessionId}`)
}
} catch (e) {
options.logger.error(`Error clearing OpenAI Thread ${sessionId}`)
options.logger.error(`[${orgId}]: Error clearing OpenAI Thread ${sessionId}`)
}
}
@ -190,6 +195,17 @@ class OpenAIAssistant_Agents implements INode {
const shouldStreamResponse = options.shouldStreamResponse
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
const chatId = options.chatId
const checkStorage = options.checkStorage
? (options.checkStorage as (orgId: string, subscriptionId: string, usageCacheManager: any) => Promise<void>)
: undefined
const updateStorageUsage = options.updateStorageUsage
? (options.updateStorageUsage as (
orgId: string,
workspaceId: string,
totalSize: number,
usageCacheManager: any
) => Promise<void>)
: undefined
if (moderations && moderations.length > 0) {
try {
@ -380,17 +396,30 @@ class OpenAIAssistant_Agents implements INode {
// eslint-disable-next-line no-useless-escape
const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename
if (!disableFileDownload) {
filePath = await downloadFile(
if (checkStorage)
await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager)
const { path, totalSize } = await downloadFile(
openAIApiKey,
cited_file,
fileName,
options.orgId,
options.chatflowid,
options.chatId
)
filePath = path
fileAnnotations.push({
filePath,
fileName
})
if (updateStorageUsage)
await updateStorageUsage(
options.orgId,
options.workspaceId,
totalSize,
options.usageCacheManager
)
}
} else {
const file_path = (annotation as OpenAI.Beta.Threads.Messages.FilePathAnnotation).file_path
@ -399,17 +428,30 @@ class OpenAIAssistant_Agents implements INode {
// eslint-disable-next-line no-useless-escape
const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename
if (!disableFileDownload) {
filePath = await downloadFile(
if (checkStorage)
await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager)
const { path, totalSize } = await downloadFile(
openAIApiKey,
cited_file,
fileName,
options.orgId,
options.chatflowid,
options.chatId
)
filePath = path
fileAnnotations.push({
filePath,
fileName
})
if (updateStorageUsage)
await updateStorageUsage(
options.orgId,
options.workspaceId,
totalSize,
options.usageCacheManager
)
}
}
}
@ -467,15 +509,21 @@ class OpenAIAssistant_Agents implements INode {
const fileId = chunk.image_file.file_id
const fileObj = await openai.files.retrieve(fileId)
const filePath = await downloadImg(
if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager)
const { filePath, totalSize } = await downloadImg(
openai,
fileId,
`${fileObj.filename}.png`,
options.orgId,
options.chatflowid,
options.chatId
)
artifacts.push({ type: 'png', data: filePath })
if (updateStorageUsage)
await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager)
if (!isStreamingStarted) {
isStreamingStarted = true
if (sseStreamer) {
@ -776,7 +824,21 @@ class OpenAIAssistant_Agents implements INode {
// eslint-disable-next-line no-useless-escape
const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename
if (!disableFileDownload) {
filePath = await downloadFile(openAIApiKey, cited_file, fileName, options.chatflowid, options.chatId)
if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager)
const { path, totalSize } = await downloadFile(
openAIApiKey,
cited_file,
fileName,
options.orgId,
options.chatflowid,
options.chatId
)
filePath = path
if (updateStorageUsage)
await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager)
fileAnnotations.push({
filePath,
fileName
@ -789,13 +851,27 @@ class OpenAIAssistant_Agents implements INode {
// eslint-disable-next-line no-useless-escape
const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename
if (!disableFileDownload) {
filePath = await downloadFile(
if (checkStorage)
await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager)
const { path, totalSize } = await downloadFile(
openAIApiKey,
cited_file,
fileName,
options.orgId,
options.chatflowid,
options.chatId
)
filePath = path
if (updateStorageUsage)
await updateStorageUsage(
options.orgId,
options.workspaceId,
totalSize,
options.usageCacheManager
)
fileAnnotations.push({
filePath,
fileName
@ -822,7 +898,20 @@ class OpenAIAssistant_Agents implements INode {
const fileId = content.image_file.file_id
const fileObj = await openai.files.retrieve(fileId)
const filePath = await downloadImg(openai, fileId, `${fileObj.filename}.png`, options.chatflowid, options.chatId)
if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager)
const { filePath, totalSize } = await downloadImg(
openai,
fileId,
`${fileObj.filename}.png`,
options.orgId,
options.chatflowid,
options.chatId
)
if (updateStorageUsage)
await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager)
artifacts.push({ type: 'png', data: filePath })
}
}
@ -847,7 +936,13 @@ class OpenAIAssistant_Agents implements INode {
}
}
const downloadImg = async (openai: OpenAI, fileId: string, fileName: string, ...paths: string[]) => {
const downloadImg = async (
openai: OpenAI,
fileId: string,
fileName: string,
orgId: string,
...paths: string[]
): Promise<{ filePath: string; totalSize: number }> => {
const response = await openai.files.content(fileId)
// Extract the binary data from the Response object
@ -857,12 +952,18 @@ const downloadImg = async (openai: OpenAI, fileId: string, fileName: string, ...
const image_data_buffer = Buffer.from(image_data)
const mime = 'image/png'
const res = await addSingleFileToStorage(mime, image_data_buffer, fileName, ...paths)
const { path, totalSize } = await addSingleFileToStorage(mime, image_data_buffer, fileName, orgId, ...paths)
return res
return { filePath: path, totalSize }
}
const downloadFile = async (openAIApiKey: string, fileObj: any, fileName: string, ...paths: string[]) => {
const downloadFile = async (
openAIApiKey: string,
fileObj: any,
fileName: string,
orgId: string,
...paths: string[]
): Promise<{ path: string; totalSize: number }> => {
try {
const response = await fetch(`https://api.openai.com/v1/files/${fileObj.id}/content`, {
method: 'GET',
@ -880,10 +981,12 @@ const downloadFile = async (openAIApiKey: string, fileObj: any, fileName: string
const data_buffer = Buffer.from(data)
const mime = 'application/octet-stream'
return await addSingleFileToStorage(mime, data_buffer, fileName, ...paths)
const { path, totalSize } = await addSingleFileToStorage(mime, data_buffer, fileName, orgId, ...paths)
return { path, totalSize }
} catch (error) {
console.error('Error downloading or writing the file:', error)
return ''
return { path: '', totalSize: 0 }
}
}

View File

@ -97,7 +97,7 @@ class ReActAgentLLM_Agents implements INode {
const executor = new AgentExecutor({
agent,
tools,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
})

View File

@ -143,7 +143,7 @@ class ToolAgent_Agents implements INode {
const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
// Add custom streaming handler if detailed streaming is enabled
@ -370,7 +370,7 @@ const prepareAgent = async (
sessionId: flowObj?.sessionId,
chatId: flowObj?.chatId,
input: flowObj?.input,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
})

View File

@ -138,7 +138,7 @@ class XMLAgent_Agents implements INode {
}
const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
let res: ChainValues = {}
@ -278,7 +278,7 @@ const prepareAgent = async (
chatId: flowObj?.chatId,
input: flowObj?.input,
isXML: true,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
})

View File

@ -98,7 +98,7 @@ class GETApiChain_Chains implements INode {
const ansPrompt = nodeData.inputs?.ansPrompt as string
const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
const shouldStreamResponse = options.shouldStreamResponse
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
@ -129,7 +129,7 @@ const getAPIChain = async (documents: string, llm: BaseLanguageModel, headers: s
const chain = APIChain.fromLLMAndAPIDocs(llm, documents, {
apiUrlPrompt,
apiResponsePrompt,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
headers: typeof headers === 'object' ? headers : headers ? JSON.parse(headers) : {}
})
return chain

View File

@ -71,7 +71,7 @@ class OpenApiChain_Chains implements INode {
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | object> {
const chain = await initChain(nodeData, options)
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
const moderations = nodeData.inputs?.inputModeration as Moderation[]
const shouldStreamResponse = options.shouldStreamResponse
@ -114,8 +114,9 @@ const initChain = async (nodeData: INodeData, options: ICommonObject) => {
} else {
if (yamlFileBase64.startsWith('FILE-STORAGE::')) {
const file = yamlFileBase64.replace('FILE-STORAGE::', '')
const orgId = options.orgId
const chatflowid = options.chatflowid
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
yamlString = fileData.toString()
} else {
const splitDataURI = yamlFileBase64.split(',')
@ -128,7 +129,7 @@ const initChain = async (nodeData: INodeData, options: ICommonObject) => {
return await createOpenAPIChain(yamlString, {
llm: model,
headers: typeof headers === 'object' ? headers : headers ? JSON.parse(headers) : {},
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
}

View File

@ -87,7 +87,7 @@ class POSTApiChain_Chains implements INode {
const ansPrompt = nodeData.inputs?.ansPrompt as string
const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
const shouldStreamResponse = options.shouldStreamResponse
@ -119,7 +119,7 @@ const getAPIChain = async (documents: string, llm: BaseLanguageModel, headers: s
const chain = APIChain.fromLLMAndAPIDocs(llm, documents, {
apiUrlPrompt,
apiResponsePrompt,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
headers: typeof headers === 'object' ? headers : headers ? JSON.parse(headers) : {}
})
return chain

View File

@ -132,7 +132,7 @@ class ConversationChain_Chains implements INode {
}
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const additionalCallback = await additionalCallbacks(nodeData, options)
let res = ''

View File

@ -185,6 +185,7 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const shouldStreamResponse = options.shouldStreamResponse
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
const chatId = options.chatId
const orgId = options.orgId
let customResponsePrompt = responsePrompt
// If the deprecated systemMessagePrompt is still exists
@ -200,7 +201,8 @@ class ConversationalRetrievalQAChain_Chains implements INode {
memoryKey: 'chat_history',
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
})
}
@ -220,7 +222,7 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const history = ((await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[]) ?? []
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const additionalCallback = await additionalCallbacks(nodeData, options)
let callbacks = [loggerHandler, ...additionalCallback]
@ -407,18 +409,21 @@ interface BufferMemoryExtendedInput {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
}
class BufferMemory extends FlowiseMemory implements MemoryMethods {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) {
super(fields)
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
this.orgId = fields.orgId
}
async getChatMessages(
@ -443,7 +448,7 @@ class BufferMemory extends FlowiseMemory implements MemoryMethods {
}
if (returnBaseMessages) {
return await mapChatMessageToBaseMessage(chatMessage)
return await mapChatMessageToBaseMessage(chatMessage, this.orgId)
}
let returnIMessages: IMessage[] = []

View File

@ -215,7 +215,7 @@ class GraphCypherQA_Chain implements INode {
query: input
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbackHandlers = await additionalCallbacks(nodeData, options)
let callbacks = [loggerHandler, ...callbackHandlers]

View File

@ -167,7 +167,7 @@ const runPrediction = async (
nodeData: INodeData,
disableStreaming?: boolean
) => {
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
const moderations = nodeData.inputs?.inputModeration as Moderation[]

View File

@ -66,7 +66,7 @@ class MultiPromptChain_Chains implements INode {
promptNames,
promptDescriptions,
promptTemplates,
llmChainOpts: { verbose: process.env.DEBUG === 'true' }
llmChainOpts: { verbose: process.env.DEBUG === 'true' ? true : false }
})
return chain
@ -95,7 +95,7 @@ class MultiPromptChain_Chains implements INode {
}
const obj = { input }
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
if (shouldStreamResponse) {

View File

@ -74,7 +74,7 @@ class MultiRetrievalQAChain_Chains implements INode {
retrieverNames,
retrieverDescriptions,
retrievers,
retrievalQAChainOpts: { verbose: process.env.DEBUG === 'true', returnSourceDocuments }
retrievalQAChainOpts: { verbose: process.env.DEBUG === 'true' ? true : false, returnSourceDocuments }
})
return chain
}
@ -101,7 +101,7 @@ class MultiRetrievalQAChain_Chains implements INode {
}
}
const obj = { input }
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
if (shouldStreamResponse) {

View File

@ -53,7 +53,7 @@ class RetrievalQAChain_Chains implements INode {
const model = nodeData.inputs?.model as BaseLanguageModel
const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever
const chain = RetrievalQAChain.fromLLM(model, vectorStoreRetriever, { verbose: process.env.DEBUG === 'true' })
const chain = RetrievalQAChain.fromLLM(model, vectorStoreRetriever, { verbose: process.env.DEBUG === 'true' ? true : false })
return chain
}
@ -80,7 +80,7 @@ class RetrievalQAChain_Chains implements INode {
const obj = {
query: input
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
if (shouldStreamResponse) {

View File

@ -194,7 +194,7 @@ class SqlDatabaseChain_Chains implements INode {
topK,
customPrompt
)
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
if (shouldStreamResponse) {
@ -241,7 +241,7 @@ const getSQLDBChain = async (
const obj: SqlDatabaseChainInput = {
llm,
database: db,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
topK: topK
}

View File

@ -55,7 +55,7 @@ class VectorDBQAChain_Chains implements INode {
const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
k: (vectorStore as any)?.k ?? 4,
verbose: process.env.DEBUG === 'true'
verbose: process.env.DEBUG === 'true' ? true : false
})
return chain
}
@ -84,7 +84,7 @@ class VectorDBQAChain_Chains implements INode {
query: input
}
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId)
const callbacks = await additionalCallbacks(nodeData, options)
if (shouldStreamResponse) {

View File

@ -4,13 +4,13 @@ Azure OpenAI Chat Model integration for Flowise
## 🌱 Env Variables
| Variable | Description | Type | Default |
| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | |
| Variable | Description | Type | Default |
| -------------------------------- | ------------------------------------------------------------------------ | ------ | ------- |
| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | |
## License
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).

View File

@ -216,6 +216,10 @@ class GoogleGenerativeAI_ChatModels implements INode {
streaming: streaming ?? true
}
// this extra metadata is needed, as langchain does not show the model name in the callbacks.
obj.metadata = {
fw_model_name: customModelName || modelName
}
if (maxOutputTokens) obj.maxOutputTokens = parseInt(maxOutputTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (topK) obj.topK = parseFloat(topK)

View File

@ -161,12 +161,13 @@ class ChatIBMWatsonx_ChatModels implements INode {
watsonxAIBearerToken
}
const obj: ChatWatsonxInput & WatsonxAuth = {
const obj = {
...auth,
streaming: streaming ?? true,
model: modelName,
temperature: temperature ? parseFloat(temperature) : undefined
}
} as ChatWatsonxInput & WatsonxAuth
if (cache) obj.cache = cache
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10)

View File

@ -123,6 +123,7 @@ class Cheerio_DocumentLoaders implements INode {
const selectedLinks = nodeData.inputs?.selectedLinks as string[]
let limit = parseInt(nodeData.inputs?.limit as string)
const output = nodeData.outputs?.output as string
const orgId = options.orgId
const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string
@ -149,7 +150,8 @@ class Cheerio_DocumentLoaders implements INode {
try {
let docs: IDocument[] = []
if (url.endsWith('.pdf')) {
if (process.env.DEBUG === 'true') options.logger.info(`CheerioWebBaseLoader does not support PDF files: ${url}`)
if (process.env.DEBUG === 'true')
options.logger.info(`[${orgId}]: CheerioWebBaseLoader does not support PDF files: ${url}`)
return docs
}
const loader = new CheerioWebBaseLoader(url, params)
@ -161,7 +163,8 @@ class Cheerio_DocumentLoaders implements INode {
}
return docs
} catch (err) {
if (process.env.DEBUG === 'true') options.logger.error(`error in CheerioWebBaseLoader: ${err.message}, on page: ${url}`)
if (process.env.DEBUG === 'true')
options.logger.error(`[${orgId}]: Error in CheerioWebBaseLoader: ${err.message}, on page: ${url}`)
return []
}
}
@ -169,7 +172,7 @@ class Cheerio_DocumentLoaders implements INode {
let docs: IDocument[] = []
if (relativeLinksMethod) {
if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`)
if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start CheerioWebBaseLoader ${relativeLinksMethod}`)
// if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined
// so when limit is 0 we can fetch all the links
if (limit === null || limit === undefined) limit = 10
@ -180,15 +183,18 @@ class Cheerio_DocumentLoaders implements INode {
: relativeLinksMethod === 'webCrawl'
? await webCrawl(url, limit)
: await xmlScrape(url, limit)
if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
if (process.env.DEBUG === 'true')
options.logger.info(`[${orgId}]: CheerioWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
if (!pages || pages.length === 0) throw new Error('No relative links found')
for (const page of pages) {
docs.push(...(await cheerioLoader(page)))
}
if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`)
if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish CheerioWebBaseLoader ${relativeLinksMethod}`)
} else if (selectedLinks && selectedLinks.length > 0) {
if (process.env.DEBUG === 'true')
options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`)
options.logger.info(
`[${orgId}]: CheerioWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`
)
for (const page of selectedLinks.slice(0, limit)) {
docs.push(...(await cheerioLoader(page)))
}

View File

@ -107,9 +107,9 @@ class Csv_DocumentLoaders implements INode {
return { files, fromStorage }
}
async getFileData(file: string, { chatflowid }: { chatflowid: string }, fromStorage?: boolean) {
async getFileData(file: string, { orgId, chatflowid }: { orgId: string; chatflowid: string }, fromStorage?: boolean) {
if (fromStorage) {
return getFileFromStorage(file, chatflowid)
return getFileFromStorage(file, orgId, chatflowid)
} else {
const splitDataURI = file.split(',')
splitDataURI.pop()
@ -126,6 +126,7 @@ class Csv_DocumentLoaders implements INode {
let docs: IDocument[] = []
const orgId = options.orgId
const chatflowid = options.chatflowid
const { files, fromStorage } = this.getFiles(nodeData)
@ -133,7 +134,7 @@ class Csv_DocumentLoaders implements INode {
for (const file of files) {
if (!file) continue
const fileData = await this.getFileData(file, { chatflowid }, fromStorage)
const fileData = await this.getFileData(file, { orgId, chatflowid }, fromStorage)
const blob = new Blob([fileData])
const loader = new CSVLoader(blob, columnName.trim().length === 0 ? undefined : columnName.trim())

View File

@ -72,7 +72,7 @@ class CustomDocumentLoader_DocumentLoaders implements INode {
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const variables = await getVars(appDataSource, databaseEntities, nodeData)
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
const flow = {
chatflowId: options.chatflowid,
sessionId: options.sessionId,

View File

@ -60,7 +60,8 @@ class DocStore_DocumentLoaders implements INode {
return returnData
}
const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find()
const searchOptions = options.searchOptions || {}
const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions)
for (const store of stores) {
if (store.status === 'SYNC') {
const obj = {

View File

@ -96,11 +96,12 @@ class Docx_DocumentLoaders implements INode {
} else {
files = [fileName]
}
const orgId = options.orgId
const chatflowid = options.chatflowid
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
const blob = new Blob([fileData])
const loader = new DocxLoader(blob)

View File

@ -118,10 +118,11 @@ class Epub_DocumentLoaders implements INode {
files = fileName.startsWith('[') && fileName.endsWith(']') ? JSON.parse(fileName) : [fileName]
const chatflowid = options.chatflowid
const orgId = options.orgId
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
const tempFilePath = path.join(tempDir, `${Date.now()}_${file}`)
fs.writeFileSync(tempFilePath, fileData)
await this.extractDocs(usage, tempFilePath, textSplitter, docs)

View File

@ -144,6 +144,7 @@ class File_DocumentLoaders implements INode {
} else {
files = [fileName]
}
const orgId = options.orgId
const chatflowid = options.chatflowid
// specific to createAttachment to get files from chatId
@ -151,14 +152,14 @@ class File_DocumentLoaders implements INode {
if (retrieveAttachmentChatId) {
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid, options.chatId)
const fileData = await getFileFromStorage(file, orgId, chatflowid, options.chatId)
const blob = new Blob([fileData])
fileBlobs.push({ blob, ext: file.split('.').pop() || '' })
}
} else {
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
const blob = new Blob([fileData])
fileBlobs.push({ blob, ext: file.split('.').pop() || '' })
}

View File

@ -146,11 +146,12 @@ class Json_DocumentLoaders implements INode {
} else {
files = [fileName]
}
const orgId = options.orgId
const chatflowid = options.chatflowid
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
const blob = new Blob([fileData])
const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata)

View File

@ -135,11 +135,12 @@ class Jsonlines_DocumentLoaders implements INode {
} else {
files = [fileName]
}
const orgId = options.orgId
const chatflowid = options.chatflowid
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
const blob = new Blob([fileData])
const loader = new JSONLinesLoader(blob, pointer, metadata)

View File

@ -122,11 +122,12 @@ class Pdf_DocumentLoaders implements INode {
} else {
files = [fileName]
}
const orgId = options.orgId
const chatflowid = options.chatflowid
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
const bf = Buffer.from(fileData)
await this.extractDocs(usage, bf, legacyBuild, textSplitter, docs)
}

View File

@ -159,6 +159,7 @@ class Playwright_DocumentLoaders implements INode {
let waitForSelector = nodeData.inputs?.waitForSelector as string
const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string
const output = nodeData.outputs?.output as string
const orgId = options.orgId
let omitMetadataKeys: string[] = []
if (_omitMetadataKeys) {
@ -202,13 +203,14 @@ class Playwright_DocumentLoaders implements INode {
}
return docs
} catch (err) {
if (process.env.DEBUG === 'true') options.logger.error(`error in PlaywrightWebBaseLoader: ${err.message}, on page: ${url}`)
if (process.env.DEBUG === 'true')
options.logger.error(`[${orgId}]: Error in PlaywrightWebBaseLoader: ${err.message}, on page: ${url}`)
}
}
let docs: IDocument[] = []
if (relativeLinksMethod) {
if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`)
if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start PlaywrightWebBaseLoader ${relativeLinksMethod}`)
// if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined
// so when limit is 0 we can fetch all the links
if (limit === null || limit === undefined) limit = 10
@ -219,15 +221,18 @@ class Playwright_DocumentLoaders implements INode {
: relativeLinksMethod === 'webCrawl'
? await webCrawl(url, limit)
: await xmlScrape(url, limit)
if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
if (process.env.DEBUG === 'true')
options.logger.info(`[${orgId}]: PlaywrightWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
if (!pages || pages.length === 0) throw new Error('No relative links found')
for (const page of pages) {
docs.push(...(await playwrightLoader(page)))
}
if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`)
if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish PlaywrightWebBaseLoader ${relativeLinksMethod}`)
} else if (selectedLinks && selectedLinks.length > 0) {
if (process.env.DEBUG === 'true')
options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`)
options.logger.info(
`[${orgId}]: PlaywrightWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`
)
for (const page of selectedLinks.slice(0, limit)) {
docs.push(...(await playwrightLoader(page)))
}

View File

@ -155,6 +155,7 @@ class Puppeteer_DocumentLoaders implements INode {
let waitForSelector = nodeData.inputs?.waitForSelector as string
const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string
const output = nodeData.outputs?.output as string
const orgId = options.orgId
let omitMetadataKeys: string[] = []
if (_omitMetadataKeys) {
@ -198,13 +199,14 @@ class Puppeteer_DocumentLoaders implements INode {
}
return docs
} catch (err) {
if (process.env.DEBUG === 'true') options.logger.error(`error in PuppeteerWebBaseLoader: ${err.message}, on page: ${url}`)
if (process.env.DEBUG === 'true')
options.logger.error(`[${orgId}]: Error in PuppeteerWebBaseLoader: ${err.message}, on page: ${url}`)
}
}
let docs: IDocument[] = []
if (relativeLinksMethod) {
if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`)
if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start PuppeteerWebBaseLoader ${relativeLinksMethod}`)
// if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined
// so when limit is 0 we can fetch all the links
if (limit === null || limit === undefined) limit = 10
@ -215,15 +217,18 @@ class Puppeteer_DocumentLoaders implements INode {
: relativeLinksMethod === 'webCrawl'
? await webCrawl(url, limit)
: await xmlScrape(url, limit)
if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
if (process.env.DEBUG === 'true')
options.logger.info(`[${orgId}]: PuppeteerWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
if (!pages || pages.length === 0) throw new Error('No relative links found')
for (const page of pages) {
docs.push(...(await puppeteerLoader(page)))
}
if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`)
if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish PuppeteerWebBaseLoader ${relativeLinksMethod}`)
} else if (selectedLinks && selectedLinks.length > 0) {
if (process.env.DEBUG === 'true')
options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`)
options.logger.info(
`[${orgId}]: PuppeteerWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`
)
for (const page of selectedLinks.slice(0, limit)) {
docs.push(...(await puppeteerLoader(page)))
}

View File

@ -4,10 +4,10 @@ DS File Loarder integration for Flowise
## 🌱 Env Variables
| Variable | Description | Type | Default |
| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| UNSTRUCTURED_API_URL | Default `unstructuredApiUrl` for S3 File Loader | String | http://localhost:8000/general/v0/general |
| Variable | Description | Type | Default |
| -------------------- | ----------------------------------------------- | ------ | ---------------------------------------- |
| UNSTRUCTURED_API_URL | Default `unstructuredApiUrl` for S3 File Loader | String | http://localhost:8000/general/v0/general |
## License
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).

View File

@ -98,11 +98,12 @@ class Text_DocumentLoaders implements INode {
} else {
files = [fileName]
}
const orgId = options.orgId
const chatflowid = options.chatflowid
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
const blob = new Blob([fileData])
const loader = new TextLoader(blob)

View File

@ -4,10 +4,10 @@ Unstructured File Loader integration for Flowise
## 🌱 Env Variables
| Variable | Description | Type | Default |
| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| UNSTRUCTURED_API_URL | Default `apiUrl` for Unstructured File/Floder Loader | String | http://localhost:8000/general/v0/general |
| Variable | Description | Type | Default |
| -------------------- | ---------------------------------------------------- | ------ | ---------------------------------------- |
| UNSTRUCTURED_API_URL | Default `apiUrl` for Unstructured File/Floder Loader | String | http://localhost:8000/general/v0/general |
## License
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).

View File

@ -532,11 +532,12 @@ class UnstructuredFile_DocumentLoaders implements INode {
} else {
files = [fileName]
}
const orgId = options.orgId
const chatflowid = options.chatflowid
for (const file of files) {
if (!file) continue
const fileData = await getFileFromStorage(file, chatflowid)
const fileData = await getFileFromStorage(file, orgId, chatflowid)
const loaderDocs = await loader.loadAndSplitBuffer(fileData, file)
docs.push(...loaderDocs)
}

View File

@ -4,13 +4,13 @@ Azure OpenAI Embedding Model integration for Flowise
## 🌱 Env Variables
| Variable | Description | Type | Default |
| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | |
| Variable | Description | Type | Default |
| ------------------------------------------- | ------------------------------------------------------------------------ | ------ | ------- |
| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | |
| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | |
## License
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).

View File

@ -10,6 +10,7 @@ import {
} from '../../../src/Interface'
import { Metadata, BaseRetriever, LLM, ContextChatEngine, ChatMessage, NodeWithScore } from 'llamaindex'
import { reformatSourceDocuments } from '../EngineUtils'
import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama'
class ContextChatEngine_LlamaIndex implements INode {
label: string
@ -93,6 +94,9 @@ class ContextChatEngine_LlamaIndex implements INode {
const chatEngine = new ContextChatEngine({ chatModel: model, retriever: vectorStoreRetriever })
// these are needed for evaluation runs
await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, chatEngine)
const msgs = (await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[]
for (const message of msgs) {
if (message.type === 'apiMessage') {

View File

@ -9,6 +9,7 @@ import {
IServerSideEventStreamer
} from '../../../src/Interface'
import { LLM, ChatMessage, SimpleChatEngine } from 'llamaindex'
import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama'
class SimpleChatEngine_LlamaIndex implements INode {
label: string
@ -78,6 +79,9 @@ class SimpleChatEngine_LlamaIndex implements INode {
const chatEngine = new SimpleChatEngine({ llm: model })
// these are needed for evaluation runs
await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, chatEngine)
const msgs = (await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[]
for (const message of msgs) {
if (message.type === 'apiMessage') {

View File

@ -10,6 +10,7 @@ import {
NodeWithScore
} from 'llamaindex'
import { reformatSourceDocuments } from '../EngineUtils'
import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama'
class QueryEngine_LlamaIndex implements INode {
label: string
@ -72,6 +73,8 @@ class QueryEngine_LlamaIndex implements INode {
let sourceNodes: NodeWithScore<Metadata>[] = []
let isStreamingStarted = false
await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, queryEngine)
const shouldStreamResponse = options.shouldStreamResponse
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
const chatId = options.chatId

View File

@ -15,6 +15,7 @@ import {
NodeWithScore
} from 'llamaindex'
import { reformatSourceDocuments } from '../EngineUtils'
import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama'
class SubQuestionQueryEngine_LlamaIndex implements INode {
label: string
@ -89,6 +90,8 @@ class SubQuestionQueryEngine_LlamaIndex implements INode {
let sourceNodes: NodeWithScore<Metadata>[] = []
let isStreamingStarted = false
await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, queryEngine)
const shouldStreamResponse = options.shouldStreamResponse
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
const chatId = options.chatId

View File

@ -4,13 +4,13 @@ Azure OpenAI LLM integration for Flowise
## 🌱 Env Variables
| Variable | Description | Type | Default |
| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI LLM | String | |
| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI LLM | String | |
| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI LLM | String | |
| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI LLM | String | |
| Variable | Description | Type | Default |
| -------------------------------- | ---------------------------------------------------------------------- | ------ | ------- |
| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI LLM | String | |
| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI LLM | String | |
| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI LLM | String | |
| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI LLM | String | |
## License
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).
Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).

View File

@ -108,6 +108,7 @@ class AgentMemory_Memory implements INode {
const databaseType = nodeData.inputs?.databaseType as string
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const orgId = options.orgId as string
const appDataSource = options.appDataSource as DataSource
let additionalConfiguration = {}
@ -135,7 +136,8 @@ class AgentMemory_Memory implements INode {
threadId,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
const recordManager = new SqliteSaver(args)
return recordManager
@ -159,7 +161,8 @@ class AgentMemory_Memory implements INode {
threadId,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
const recordManager = new PostgresSaver(args)
return recordManager
@ -184,7 +187,8 @@ class AgentMemory_Memory implements INode {
threadId,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
const recordManager = new MySQLSaver(args)
return recordManager

View File

@ -65,6 +65,7 @@ class MySQLAgentMemory_Memory implements INode {
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const appDataSource = options.appDataSource as DataSource
const orgId = options.orgId as string
let additionalConfiguration = {}
if (additionalConfig) {
@ -102,7 +103,8 @@ class MySQLAgentMemory_Memory implements INode {
threadId,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
const recordManager = new MySQLSaver(args)
return recordManager

View File

@ -242,7 +242,7 @@ export class MySQLSaver extends BaseCheckpointSaver implements MemoryMethods {
}
if (returnBaseMessages) {
return await mapChatMessageToBaseMessage(chatMessage)
return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId)
}
let returnIMessages: IMessage[] = []

View File

@ -65,6 +65,7 @@ class PostgresAgentMemory_Memory implements INode {
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const appDataSource = options.appDataSource as DataSource
const orgId = options.orgId as string
let additionalConfiguration = {}
if (additionalConfig) {
@ -101,7 +102,8 @@ class PostgresAgentMemory_Memory implements INode {
threadId,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
const recordManager = new PostgresSaver(args)
return recordManager

View File

@ -283,7 +283,7 @@ CREATE TABLE IF NOT EXISTS ${tableName} (
}
if (returnBaseMessages) {
return await mapChatMessageToBaseMessage(chatMessage)
return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId)
}
let returnIMessages: IMessage[] = []

View File

@ -51,6 +51,7 @@ class SQLiteAgentMemory_Memory implements INode {
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const appDataSource = options.appDataSource as DataSource
const orgId = options.orgId as string
let additionalConfiguration = {}
if (additionalConfig) {
@ -76,7 +77,8 @@ class SQLiteAgentMemory_Memory implements INode {
threadId,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
const recordManager = new SqliteSaver(args)

View File

@ -266,7 +266,7 @@ CREATE TABLE IF NOT EXISTS ${tableName} (
}
if (returnBaseMessages) {
return await mapChatMessageToBaseMessage(chatMessage)
return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId)
}
let returnIMessages: IMessage[] = []

View File

@ -9,6 +9,7 @@ export type SaverOptions = {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
}
export interface CheckpointTuple {

View File

@ -61,6 +61,7 @@ class BufferMemory_Memory implements INode {
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const orgId = options.orgId as string
return new BufferMemoryExtended({
returnMessages: true,
@ -68,7 +69,8 @@ class BufferMemory_Memory implements INode {
sessionId,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
})
}
}
@ -78,12 +80,14 @@ interface BufferMemoryExtendedInput {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
}
class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
sessionId = ''
constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) {
@ -92,6 +96,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
this.orgId = fields.orgId
}
async getChatMessages(
@ -117,7 +122,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
}
if (returnBaseMessages) {
return await mapChatMessageToBaseMessage(chatMessage)
return await mapChatMessageToBaseMessage(chatMessage, this.orgId)
}
let returnIMessages: IMessage[] = []

View File

@ -69,6 +69,7 @@ class BufferWindowMemory_Memory implements INode {
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const orgId = options.orgId as string
const obj: Partial<BufferWindowMemoryInput> & BufferMemoryExtendedInput = {
returnMessages: true,
@ -77,7 +78,8 @@ class BufferWindowMemory_Memory implements INode {
k: parseInt(k, 10),
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
return new BufferWindowMemoryExtended(obj)
@ -89,12 +91,14 @@ interface BufferMemoryExtendedInput {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
}
class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMethods {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
sessionId = ''
constructor(fields: BufferWindowMemoryInput & BufferMemoryExtendedInput) {
@ -103,6 +107,7 @@ class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMe
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
this.orgId = fields.orgId
}
async getChatMessages(
@ -134,7 +139,7 @@ class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMe
}
if (returnBaseMessages) {
return await mapChatMessageToBaseMessage(chatMessage)
return await mapChatMessageToBaseMessage(chatMessage, this.orgId)
}
let returnIMessages: IMessage[] = []

View File

@ -78,6 +78,7 @@ class ConversationSummaryBufferMemory_Memory implements INode {
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const orgId = options.orgId as string
const obj: ConversationSummaryBufferMemoryInput & BufferMemoryExtendedInput = {
llm: model,
@ -87,7 +88,8 @@ class ConversationSummaryBufferMemory_Memory implements INode {
returnMessages: true,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
return new ConversationSummaryBufferMemoryExtended(obj)
@ -99,12 +101,14 @@ interface BufferMemoryExtendedInput {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
}
class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory implements MemoryMethods {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
sessionId = ''
constructor(fields: ConversationSummaryBufferMemoryInput & BufferMemoryExtendedInput) {
@ -113,6 +117,7 @@ class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
this.orgId = fields.orgId
}
async getChatMessages(
@ -137,7 +142,7 @@ class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory
chatMessage.unshift(...prependMessages)
}
let baseMessages = await mapChatMessageToBaseMessage(chatMessage)
let baseMessages = await mapChatMessageToBaseMessage(chatMessage, this.orgId)
// Prune baseMessages if it exceeds max token limit
if (this.movingSummaryBuffer) {

View File

@ -69,6 +69,7 @@ class ConversationSummaryMemory_Memory implements INode {
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const chatflowid = options.chatflowid as string
const orgId = options.orgId as string
const obj: ConversationSummaryMemoryInput & BufferMemoryExtendedInput = {
llm: model,
@ -77,7 +78,8 @@ class ConversationSummaryMemory_Memory implements INode {
sessionId,
appDataSource,
databaseEntities,
chatflowid
chatflowid,
orgId
}
return new ConversationSummaryMemoryExtended(obj)
@ -89,12 +91,14 @@ interface BufferMemoryExtendedInput {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
}
class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements MemoryMethods {
appDataSource: DataSource
databaseEntities: IDatabaseEntity
chatflowid: string
orgId: string
sessionId = ''
constructor(fields: ConversationSummaryMemoryInput & BufferMemoryExtendedInput) {
@ -103,6 +107,7 @@ class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements
this.appDataSource = fields.appDataSource
this.databaseEntities = fields.databaseEntities
this.chatflowid = fields.chatflowid
this.orgId = fields.orgId
}
async getChatMessages(
@ -128,7 +133,7 @@ class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements
chatMessage.unshift(...prependMessages)
}
const baseMessages = await mapChatMessageToBaseMessage(chatMessage)
const baseMessages = await mapChatMessageToBaseMessage(chatMessage, this.orgId)
// Get summary
if (this.llm && typeof this.llm !== 'string') {

View File

@ -125,6 +125,8 @@ const initializeDynamoDB = async (nodeData: INodeData, options: ICommonObject):
config
})
const orgId = options.orgId as string
const memory = new BufferMemoryExtended({
memoryKey: memoryKey ?? 'chat_history',
chatHistory: dynamoDb,
@ -132,7 +134,8 @@ const initializeDynamoDB = async (nodeData: INodeData, options: ICommonObject):
dynamodbClient: client,
tableName,
partitionKey,
dynamoKey: { [partitionKey]: { S: sessionId } }
dynamoKey: { [partitionKey]: { S: sessionId } },
orgId
})
return memory
}
@ -143,6 +146,7 @@ interface BufferMemoryExtendedInput {
tableName: string
partitionKey: string
dynamoKey: Record<string, AttributeValue>
orgId: string
}
interface DynamoDBSerializedChatMessage {
@ -165,6 +169,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
private dynamoKey: Record<string, AttributeValue>
private messageAttributeName: string
sessionId = ''
orgId = ''
dynamodbClient: DynamoDBClient
constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) {
@ -174,6 +179,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
this.tableName = fields.tableName
this.partitionKey = fields.partitionKey
this.dynamoKey = fields.dynamoKey
this.orgId = fields.orgId
}
overrideDynamoKey(overrideSessionId = '') {
@ -260,7 +266,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
.filter((x): x is StoredMessage => x.type !== undefined && x.data.content !== undefined)
const baseMessages = messages.map(mapStoredMessageToChatMessage)
if (prependMessages?.length) {
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages)))
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId)))
}
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
}

View File

@ -151,6 +151,7 @@ class Mem0_Memory implements INode {
const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Promise<BaseMem0Memory> => {
const initialUserId = nodeData.inputs?.user_id as string
const useFlowiseChatId = nodeData.inputs?.useFlowiseChatId as boolean
const orgId = options.orgId as string
if (!useFlowiseChatId && !initialUserId) {
throw new Error('User ID field cannot be empty when "Use Flowise Chat ID" is OFF.')
@ -198,7 +199,8 @@ const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Prom
databaseEntities: options.databaseEntities as IDatabaseEntity,
chatflowid: options.chatflowid as string,
searchOnly: (nodeData.inputs?.searchOnly as boolean) || false,
useFlowiseChatId: useFlowiseChatId
useFlowiseChatId: useFlowiseChatId,
orgId: orgId
}
return new Mem0MemoryExtended(obj)
@ -207,11 +209,13 @@ const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Prom
interface Mem0MemoryExtendedInput extends Mem0MemoryInput {
memoryOptions?: MemoryOptions | SearchOptions
useFlowiseChatId: boolean
orgId: string
}
class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods {
initialUserId: string
userId: string
orgId: string
memoryKey: string
inputKey: string
appDataSource: DataSource
@ -233,6 +237,7 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods {
this.chatflowid = fields.chatflowid
this.searchOnly = fields.searchOnly
this.useFlowiseChatId = fields.useFlowiseChatId
this.orgId = fields.orgId
}
// Selects Mem0 user_id based on toggle state (Flowise chat ID or input field)
@ -337,7 +342,7 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods {
console.warn('Mem0 history is not a string, cannot prepend directly.')
}
return await mapChatMessageToBaseMessage(chatMessage)
return await mapChatMessageToBaseMessage(chatMessage, this.orgId)
}
return returnIMessages

View File

@ -88,9 +88,12 @@ const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): P
const mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData)
const driverInfo = { name: 'Flowise', version: (await getVersion()).version }
const orgId = options.orgId as string
return new BufferMemoryExtended({
memoryKey: memoryKey ?? 'chat_history',
sessionId,
orgId,
mongoConnection: {
databaseName,
collectionName,
@ -102,6 +105,7 @@ const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): P
interface BufferMemoryExtendedInput {
sessionId: string
orgId: string
mongoConnection: {
databaseName: string
collectionName: string
@ -112,6 +116,7 @@ interface BufferMemoryExtendedInput {
class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
sessionId = ''
orgId = ''
mongoConnection: {
databaseName: string
collectionName: string
@ -122,6 +127,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) {
super(fields)
this.sessionId = fields.sessionId
this.orgId = fields.orgId
this.mongoConnection = fields.mongoConnection
}
@ -138,7 +144,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
const messages = document?.messages || []
const baseMessages = messages.map(mapStoredMessageToChatMessage)
if (prependMessages?.length) {
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages)))
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId)))
}
await client.close()

View File

@ -88,6 +88,7 @@ const initializeRedis = async (nodeData: INodeData, options: ICommonObject): Pro
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const redisUrl = getCredentialParam('redisUrl', credentialData, nodeData)
const orgId = options.orgId as string
const redisOptions = redisUrl
? redisUrl
@ -104,7 +105,8 @@ const initializeRedis = async (nodeData: INodeData, options: ICommonObject): Pro
sessionId,
windowSize,
sessionTTL,
redisOptions
redisOptions,
orgId
})
return memory
@ -114,11 +116,13 @@ interface BufferMemoryExtendedInput {
sessionId: string
windowSize?: number
sessionTTL?: number
orgId: string
redisOptions: RedisOptions | string
}
class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
sessionId = ''
orgId = ''
windowSize?: number
sessionTTL?: number
redisOptions: RedisOptions | string
@ -128,6 +132,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
this.sessionId = fields.sessionId
this.windowSize = fields.windowSize
this.sessionTTL = fields.sessionTTL
this.orgId = fields.orgId
this.redisOptions = fields.redisOptions
}
@ -165,7 +170,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
const orderedMessages = rawStoredMessages.reverse().map((message) => JSON.parse(message))
const baseMessages = orderedMessages.map(mapStoredMessageToChatMessage)
if (prependMessages?.length) {
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages)))
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId)))
}
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
})

View File

@ -100,13 +100,14 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject
sessionTTL,
client
})
const orgId = options.orgId as string
const memory = new BufferMemoryExtended({
memoryKey: memoryKey ?? 'chat_history',
chatHistory: redisChatMessageHistory,
sessionId,
sessionTTL,
redisClient: client
redisClient: client,
orgId
})
return memory
@ -115,11 +116,13 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject
interface BufferMemoryExtendedInput {
redisClient: Redis
sessionId: string
orgId: string
sessionTTL?: number
}
class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
sessionId = ''
orgId = ''
redisClient: Redis
sessionTTL?: number
@ -128,6 +131,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
this.sessionId = fields.sessionId
this.redisClient = fields.redisClient
this.sessionTTL = fields.sessionTTL
this.orgId = fields.orgId
}
async getChatMessages(
@ -143,7 +147,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods {
const previousMessages = orderedMessages.filter((x): x is StoredMessage => x.type !== undefined && x.data.content !== undefined)
const baseMessages = previousMessages.map(mapStoredMessageToChatMessage)
if (prependMessages?.length) {
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages)))
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId)))
}
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
}

View File

@ -119,6 +119,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const orgId = options.orgId as string
const obj: ZepMemoryInput & ZepMemoryExtendedInput = {
baseURL,
aiPrefix,
@ -127,6 +128,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi
memoryKey,
inputKey,
sessionId,
orgId,
k: k ? parseInt(k, 10) : undefined
}
if (apiKey) obj.apiKey = apiKey
@ -136,14 +138,17 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi
interface ZepMemoryExtendedInput {
k?: number
orgId: string
}
class ZepMemoryExtended extends ZepMemory implements MemoryMethods {
lastN?: number
orgId = ''
constructor(fields: ZepMemoryInput & ZepMemoryExtendedInput) {
super(fields)
this.lastN = fields.k
this.orgId = fields.orgId
}
async loadMemoryVariables(values: InputValues, overrideSessionId = ''): Promise<MemoryVariables> {
@ -176,7 +181,7 @@ class ZepMemoryExtended extends ZepMemory implements MemoryMethods {
const memoryVariables = await this.loadMemoryVariables({}, id)
const baseMessages = memoryVariables[this.memoryKey]
if (prependMessages?.length) {
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages)))
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId)))
}
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
}

View File

@ -113,6 +113,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const orgId = options.orgId as string
const obj: ZepMemoryInput & ZepMemoryExtendedInput = {
apiKey,
aiPrefix,
@ -121,7 +122,8 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi
sessionId,
inputKey,
memoryType: memoryType,
returnMessages: true
returnMessages: true,
orgId
}
return new ZepMemoryExtended(obj)
@ -129,14 +131,17 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi
interface ZepMemoryExtendedInput {
memoryType?: 'perpetual' | 'message_window'
orgId: string
}
class ZepMemoryExtended extends ZepMemory implements MemoryMethods {
memoryType: 'perpetual' | 'message_window'
orgId: string
constructor(fields: ZepMemoryInput & ZepMemoryExtendedInput) {
super(fields)
this.memoryType = fields.memoryType ?? 'perpetual'
this.orgId = fields.orgId
}
async loadMemoryVariables(values: InputValues, overrideSessionId = ''): Promise<MemoryVariables> {
@ -169,7 +174,7 @@ class ZepMemoryExtended extends ZepMemory implements MemoryMethods {
const memoryVariables = await this.loadMemoryVariables({}, id)
const baseMessages = memoryVariables[this.memoryKey]
if (prependMessages?.length) {
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages)))
baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId)))
}
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages)
}

View File

@ -233,7 +233,7 @@ async function createAgent(
sessionId: flowObj?.sessionId,
chatId: flowObj?.chatId,
input: flowObj?.input,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
})
return executor

View File

@ -120,7 +120,7 @@ class ChatPromptTemplate_Prompts implements INode {
) {
const appDataSource = options.appDataSource as DataSource
const databaseEntities = options.databaseEntities as IDatabaseEntity
const vm = await getVM(appDataSource, databaseEntities, nodeData, {})
const vm = await getVM(appDataSource, databaseEntities, nodeData, options, {})
try {
const response = await vm.run(`module.exports = async function() {${messageHistoryCode}}()`, __dirname)
if (!Array.isArray(response)) throw new Error('Returned message history must be an array')

View File

@ -680,7 +680,7 @@ async function createAgent(
sessionId: flowObj?.sessionId,
chatId: flowObj?.chatId,
input: flowObj?.input,
verbose: process.env.DEBUG === 'true',
verbose: process.env.DEBUG === 'true' ? true : false,
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
})
return executor
@ -877,7 +877,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom
const updateStateMemory = nodeData.inputs?.updateStateMemory as string
const selectedTab = tabIdentifier ? tabIdentifier.split(`_${nodeData.id}`)[0] : 'updateStateMemoryUI'
const variables = await getVars(appDataSource, databaseEntities, nodeData)
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
const flow = {
chatflowId: options.chatflowid,
@ -930,7 +930,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom
throw new Error(e)
}
} else if (selectedTab === 'updateStateMemoryCode' && updateStateMemoryCode) {
const vm = await getVM(appDataSource, databaseEntities, nodeData, flow)
const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow)
try {
const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname)
if (typeof response !== 'object') throw new Error('Return output must be an object')

View File

@ -267,7 +267,7 @@ const runCondition = async (nodeData: INodeData, input: string, options: ICommon
const tabIdentifier = nodeData.inputs?.[`${TAB_IDENTIFIER}_${nodeData.id}`] as string
const selectedTab = tabIdentifier ? tabIdentifier.split(`_${nodeData.id}`)[0] : 'conditionUI'
const variables = await getVars(appDataSource, databaseEntities, nodeData)
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
const flow = {
chatflowId: options.chatflowid,
@ -279,7 +279,7 @@ const runCondition = async (nodeData: INodeData, input: string, options: ICommon
}
if (selectedTab === 'conditionFunction' && conditionFunction) {
const vm = await getVM(appDataSource, databaseEntities, nodeData, flow)
const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow)
try {
const response = await vm.run(`module.exports = async function() {${conditionFunction}}()`, __dirname)
if (typeof response !== 'string') throw new Error('Condition function must return a string')

View File

@ -540,7 +540,7 @@ const runCondition = async (
result = { ...jsonResult, additional_kwargs: { nodeId: nodeData.id } }
}
const variables = await getVars(appDataSource, databaseEntities, nodeData)
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
const flow = {
chatflowId: options.chatflowid,
@ -553,7 +553,7 @@ const runCondition = async (
}
if (selectedTab === 'conditionFunction' && conditionFunction) {
const vm = await getVM(appDataSource, databaseEntities, nodeData, flow)
const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow)
try {
const response = await vm.run(`module.exports = async function() {${conditionFunction}}()`, __dirname)
if (typeof response !== 'string') throw new Error('Condition function must return a string')

View File

@ -102,7 +102,7 @@ class CustomFunction_SeqAgents implements INode {
if (!sequentialNodes || !sequentialNodes.length) throw new Error('Custom function must have a predecessor!')
const executeFunc = async (state: ISeqAgentsState) => {
const variables = await getVars(appDataSource, databaseEntities, nodeData)
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
const flow = {
chatflowId: options.chatflowid,
sessionId: options.sessionId,

View File

@ -141,7 +141,8 @@ class ExecuteFlow_SeqAgents implements INode {
return returnData
}
const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).find()
const searchOptions = options.searchOptions || {}
const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions)
for (let i = 0; i < chatflows.length; i += 1) {
const data = {
@ -189,7 +190,7 @@ class ExecuteFlow_SeqAgents implements INode {
const chatId = options.chatId
const executeFunc = async (state: ISeqAgentsState) => {
const variables = await getVars(appDataSource, databaseEntities, nodeData)
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
let flowInput = ''
if (seqExecuteFlowInput === 'userQuestion') {
@ -223,7 +224,7 @@ class ExecuteFlow_SeqAgents implements INode {
}
}
const options = {
const callOptions = {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -234,7 +235,7 @@ class ExecuteFlow_SeqAgents implements INode {
let sandbox: ICommonObject = {
$input: flowInput,
$callOptions: options,
$callOptions: callOptions,
$callBody: body,
util: undefined,
Symbol: undefined,

View File

@ -668,7 +668,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom
const updateStateMemory = nodeData.inputs?.updateStateMemory as string
const selectedTab = tabIdentifier ? tabIdentifier.split(`_${nodeData.id}`)[0] : 'updateStateMemoryUI'
const variables = await getVars(appDataSource, databaseEntities, nodeData)
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
const flow = {
chatflowId: options.chatflowid,
@ -721,7 +721,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom
throw new Error(e)
}
} else if (selectedTab === 'updateStateMemoryCode' && updateStateMemoryCode) {
const vm = await getVM(appDataSource, databaseEntities, nodeData, flow)
const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow)
try {
const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname)
if (typeof response !== 'object') throw new Error('Return output must be an object')

Some files were not shown because too many files have changed in this diff Show More