Compare commits
113 Commits
flowise-ui ... main
Commits (SHA1; the Author and Date columns were empty in this capture):

465005a503, e6e0c2d07b, 660a8e357a, 113180d03b, 069ba28bc0, 20db1597a4, 478a294095, 6a59af11e6, 562370b8e2, 4e92db6910, 7cc2c13694, 3ad2b3a559, da32fc7167, 315e3aedc3, 9dbb4bf623, 1f3f7a7194, 4d79653741, 03ef28afbc, 0cc7b3036e, 097404f24a, 2029588d4d, c9db81096a, b5f7fac015, ca22160361, ffe69936dc, b8f7a200fb, 2f2b6e1713, 4e1fac501f, 888994bc8f, 3cab803918, 366d38b861, 2414057c08, 4a642f02d0, ceb0512e2f, 94cae3b66f, 3fafd15a80, 9ff3d653ae, 0dc14b5cd3, b9a020dc70, 761ffe6851, 6d3755d16e, faf0a0a315, 4624e15c2e, a7b6f9b208, 2bd96090f0, 346a55b6d8, 03c1750d73, ec1762b10f, 02bb2ba62b, fdb6422aef, fe6f5f88a5, 82124d4871, 3b8b21342d, 679a0409f5, 1fa9303d7c, 75eb5f57aa, 3d731664f9, 0f8d45d25c, 3e8db185dd, 6f5b0d9906, fd7fc2f4d7, a92f7dfc3f, 80224275d9, 4417102f6c, 0149688a16, f3d5b7766d, 97515989a2, 601de76aea, c99d870c82, 5df09a15b8, e925801b63, eed7581d0e, 1ae1638ed9, 0a3c8b94ab, 9554b1a8e3, ac565b8981, 37ef6ffa50, 2ae4678da4, 6f94d61f22, 62d34066c9, f3f2eabb89, bff859520a, 4111ec31b0, 7ab586c865, ac794ab6eb, 1fb12cd931, a0dca552a2, a38d37f4b5, 1a410d84ac, 7a50755546, ac252516f8, 6fe5b98d6f, 9b8fee3d8f, 8d0a198e2f, 580957e4aa, a86f618186, 8c1175225f, 28b0174eea, b501932491, 6890ced939, 0065e8f1a0, 31434e52ce, 84a0a45ff7, dd284e37c3, b5da234ce7, e48f28d13d, cf6539cd3f, 011d60332e, e9d4c3b54b, 41131dfac3, 42152dd036, fc50f2308b, f560768133
@@ -0,0 +1,72 @@
name: Docker Image CI - Docker Hub

on:
    workflow_dispatch:
        inputs:
            node_version:
                description: 'Node.js version to build this image with.'
                type: choice
                required: true
                default: '20'
                options:
                    - '20'
            tag_version:
                description: 'Tag version of the image to be pushed.'
                type: string
                required: true
                default: 'latest'

jobs:
    docker:
        runs-on: ubuntu-latest
        steps:
            - name: Set default values
              id: defaults
              run: |
                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT

            - name: Checkout
              uses: actions/checkout@v4.1.1

            - name: Set up QEMU
              uses: docker/setup-qemu-action@v3.0.0

            - name: Set up Docker Buildx
              uses: docker/setup-buildx-action@v3.0.0

            - name: Login to Docker Hub
              uses: docker/login-action@v3
              with:
                  username: ${{ secrets.DOCKERHUB_USERNAME }}
                  password: ${{ secrets.DOCKERHUB_TOKEN }}

            # -------------------------
            # Build and push main image
            # -------------------------
            - name: Build and push main image
              uses: docker/build-push-action@v5.3.0
              with:
                  context: .
                  file: ./docker/Dockerfile
                  build-args: |
                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
                  platforms: linux/amd64,linux/arm64
                  push: true
                  tags: |
                      flowiseai/flowise:${{ steps.defaults.outputs.tag_version }}

            # -------------------------
            # Build and push worker image
            # -------------------------
            - name: Build and push worker image
              uses: docker/build-push-action@v5.3.0
              with:
                  context: .
                  file: docker/worker/Dockerfile
                  build-args: |
                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
                  platforms: linux/amd64,linux/arm64
                  push: true
                  tags: |
                      flowiseai/flowise-worker:${{ steps.defaults.outputs.tag_version }}
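Both new workflows are manual-only (`workflow_dispatch`). As a hedged illustration of how such a workflow can be dispatched programmatically with its inputs, here is a minimal Octokit sketch; the workflow file name is an assumption, since this compare view does not show the actual file paths:

```ts
import { Octokit } from '@octokit/rest'

// Token needs Actions write permission; the workflow file name below is hypothetical.
const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN })

async function dispatchDockerHubBuild(): Promise<void> {
    await octokit.rest.actions.createWorkflowDispatch({
        owner: 'FlowiseAI',
        repo: 'Flowise',
        workflow_id: 'docker-image-dockerhub.yml', // hypothetical file name
        ref: 'main',
        inputs: { node_version: '20', tag_version: 'latest' }
    })
}

dispatchDockerHubBuild().catch(console.error)
```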
@@ -0,0 +1,73 @@
name: Docker Image CI - AWS ECR

on:
    workflow_dispatch:
        inputs:
            environment:
                description: 'Environment to push the image to.'
                required: true
                default: 'dev'
                type: choice
                options:
                    - dev
                    - prod
            node_version:
                description: 'Node.js version to build this image with.'
                type: choice
                required: true
                default: '20'
                options:
                    - '20'
            tag_version:
                description: 'Tag version of the image to be pushed.'
                type: string
                required: true
                default: 'latest'

jobs:
    docker:
        runs-on: ubuntu-latest
        environment: ${{ github.event.inputs.environment }}
        steps:
            - name: Set default values
              id: defaults
              run: |
                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT

            - name: Checkout
              uses: actions/checkout@v4.1.1

            - name: Set up QEMU
              uses: docker/setup-qemu-action@v3.0.0

            - name: Set up Docker Buildx
              uses: docker/setup-buildx-action@v3.0.0

            - name: Configure AWS Credentials
              uses: aws-actions/configure-aws-credentials@v3
              with:
                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
                  aws-region: ${{ secrets.AWS_REGION }}

            - name: Login to Amazon ECR
              uses: aws-actions/amazon-ecr-login@v1

            # -------------------------
            # Build and push main image
            # -------------------------
            - name: Build and push main image
              uses: docker/build-push-action@v5.3.0
              with:
                  context: .
                  file: Dockerfile
                  build-args: |
                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
                  platforms: linux/amd64,linux/arm64
                  push: true
                  tags: |
                      ${{ format('{0}.dkr.ecr.{1}.amazonaws.com/flowise:{2}',
                      secrets.AWS_ACCOUNT_ID,
                      secrets.AWS_REGION,
                      steps.defaults.outputs.tag_version) }}
@@ -1,114 +0,0 @@
name: Docker Image CI

on:
    workflow_dispatch:
        inputs:
            registry:
                description: 'Container Registry to push the image to.'
                type: choice
                required: true
                default: 'aws_ecr'
                options:
                    - 'docker_hub'
                    - 'aws_ecr'
            environment:
                description: 'Environment to push the image to.'
                required: true
                default: 'dev'
                type: choice
                options:
                    - dev
                    - prod
            image_type:
                description: 'Type of image to build and push.'
                type: choice
                required: true
                default: 'main'
                options:
                    - 'main'
                    - 'worker'
            node_version:
                description: 'Node.js version to build this image with.'
                type: choice
                required: true
                default: '20'
                options:
                    - '20'
            tag_version:
                description: 'Tag version of the image to be pushed.'
                type: string
                required: true
                default: 'latest'

jobs:
    docker:
        runs-on: ubuntu-latest
        environment: ${{ github.event.inputs.environment }}
        steps:
            - name: Set default values
              id: defaults
              run: |
                  echo "registry=${{ github.event.inputs.registry || 'aws_ecr' }}" >> $GITHUB_OUTPUT
                  echo "image_type=${{ github.event.inputs.image_type || 'main' }}" >> $GITHUB_OUTPUT
                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT

            - name: Checkout
              uses: actions/checkout@v4.1.1

            - name: Set up QEMU
              uses: docker/setup-qemu-action@v3.0.0

            - name: Set up Docker Buildx
              uses: docker/setup-buildx-action@v3.0.0

            # ------------------------
            # Login Steps (conditional)
            # ------------------------
            - name: Login to Docker Hub
              if: steps.defaults.outputs.registry == 'docker_hub'
              uses: docker/login-action@v3
              with:
                  username: ${{ secrets.DOCKERHUB_USERNAME }}
                  password: ${{ secrets.DOCKERHUB_TOKEN }}

            - name: Configure AWS Credentials
              if: steps.defaults.outputs.registry == 'aws_ecr'
              uses: aws-actions/configure-aws-credentials@v3
              with:
                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
                  aws-region: ${{ secrets.AWS_REGION }}

            - name: Login to Amazon ECR
              if: steps.defaults.outputs.registry == 'aws_ecr'
              uses: aws-actions/amazon-ecr-login@v1

            # -------------------------
            # Build and push (conditional tags)
            # -------------------------
            - name: Build and push
              uses: docker/build-push-action@v5.3.0
              with:
                  context: .
                  file: |
                      ${{
                      steps.defaults.outputs.image_type == 'worker' && 'docker/worker/Dockerfile' ||
                      (steps.defaults.outputs.registry == 'docker_hub' && './docker/Dockerfile' || 'Dockerfile')
                      }}
                  build-args: |
                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
                  platforms: linux/amd64,linux/arm64
                  push: true
                  tags: |
                      ${{
                      steps.defaults.outputs.registry == 'docker_hub' &&
                      format('flowiseai/flowise{0}:{1}',
                      steps.defaults.outputs.image_type == 'worker' && '-worker' || '',
                      steps.defaults.outputs.tag_version) ||
                      format('{0}.dkr.ecr.{1}.amazonaws.com/flowise{2}:{3}',
                      secrets.AWS_ACCOUNT_ID,
                      secrets.AWS_REGION,
                      steps.defaults.outputs.image_type == 'worker' && '-worker' || '',
                      steps.defaults.outputs.tag_version)
                      }}
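The deleted workflow leaned on GitHub's `a && b || c` expression idiom, which acts like a ternary as long as `b` is truthy. A rough TypeScript restatement of its Dockerfile-selection and tag logic (variable and function names are illustrative only):

```ts
// Illustrative restatement of the removed workflow's conditional expressions.
type Registry = 'docker_hub' | 'aws_ecr'
type ImageType = 'main' | 'worker'

function dockerfileFor(imageType: ImageType, registry: Registry): string {
    // image_type == 'worker' && 'docker/worker/Dockerfile' || (registry-dependent default)
    return imageType === 'worker'
        ? 'docker/worker/Dockerfile'
        : registry === 'docker_hub'
        ? './docker/Dockerfile'
        : 'Dockerfile'
}

function tagFor(registry: Registry, imageType: ImageType, accountId: string, region: string, tag: string): string {
    const suffix = imageType === 'worker' ? '-worker' : ''
    return registry === 'docker_hub'
        ? `flowiseai/flowise${suffix}:${tag}`
        : `${accountId}.dkr.ecr.${region}.amazonaws.com/flowise${suffix}:${tag}`
}
```

Splitting this into two single-purpose workflows removes the fragile nested expressions entirely, at the cost of some duplication.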
@@ -114,50 +114,52 @@ Flowise has 3 different modules in a single mono repository.

 to make sure everything works fine in production.

-11. Commit code and submit Pull Request from forked branch pointing to [Flowise master](https://github.com/FlowiseAI/Flowise/tree/master).
+11. Commit code and submit Pull Request from forked branch pointing to [Flowise main](https://github.com/FlowiseAI/Flowise/tree/main).

 ## 🌱 Env Variables

 Flowise supports different environment variables to configure your instance. You can specify the following variables in the `.env` file inside the `packages/server` folder. Read [more](https://docs.flowiseai.com/environment-variables).

 | Variable | Description | Type | Default |
 | --- | --- | --- | --- |
 | PORT | The HTTP port Flowise runs on | Number | 3000 |
 | CORS_ORIGINS | The allowed origins for all cross-origin HTTP calls | String | |
 | IFRAME_ORIGINS | The allowed origins for iframe src embedding | String | |
 | FLOWISE_FILE_SIZE_LIMIT | Upload file size limit | String | 50mb |
 | DEBUG | Print logs from components | Boolean | |
 | LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` |
 | LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` |
 | LOG_JSON_SPACES | Spaces to beautify JSON logs | | 2 |
-| TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Tool Function | String | |
-| TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Tool Function | String | |
+| TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Custom Tool or Function | String | |
+| TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Custom Tool or Function | String | |
+| ALLOW_BUILTIN_DEP | Allow project dependencies to be used for Custom Tool or Function | Boolean | false |
 | DATABASE_TYPE | Type of database to store the flowise data | Enum String: `sqlite`, `mysql`, `postgres` | `sqlite` |
 | DATABASE_PATH | Location where database is saved (when DATABASE_TYPE is sqlite) | String | `your-home-dir/.flowise` |
 | DATABASE_HOST | Host URL or IP address (when DATABASE_TYPE is not sqlite) | String | |
 | DATABASE_PORT | Database port (when DATABASE_TYPE is not sqlite) | String | |
 | DATABASE_USER | Database username (when DATABASE_TYPE is not sqlite) | String | |
 | DATABASE_PASSWORD | Database password (when DATABASE_TYPE is not sqlite) | String | |
 | DATABASE_NAME | Database name (when DATABASE_TYPE is not sqlite) | String | |
 | DATABASE_SSL_KEY_BASE64 | Database SSL client cert in base64 (takes priority over DATABASE_SSL) | Boolean | false |
 | DATABASE_SSL | Database connection over SSL (when DATABASE_TYPE is postgres) | Boolean | false |
 | SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` |
 | FLOWISE_SECRETKEY_OVERWRITE | Encryption key to be used instead of the key stored in SECRETKEY_PATH | String | |
 | MODEL_LIST_CONFIG_JSON | File path to load list of models from your local config file | String | `/your_model_list_config_file_path` |
 | STORAGE_TYPE | Type of storage for uploaded files. Default is `local` | Enum String: `s3`, `local`, `gcs` | `local` |
 | BLOB_STORAGE_PATH | Local folder path where uploaded files are stored when `STORAGE_TYPE` is `local` | String | `your-home-dir/.flowise/storage` |
 | S3_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `s3` | String | |
 | S3_STORAGE_ACCESS_KEY_ID | AWS Access Key | String | |
 | S3_STORAGE_SECRET_ACCESS_KEY | AWS Secret Key | String | |
 | S3_STORAGE_REGION | Region for S3 bucket | String | |
 | S3_ENDPOINT_URL | Custom endpoint for S3 | String | |
 | S3_FORCE_PATH_STYLE | Set this to true to force the request to use path-style addressing | Boolean | false |
 | GOOGLE_CLOUD_STORAGE_PROJ_ID | The GCP project id for cloud storage & logging when `STORAGE_TYPE` is `gcs` | String | |
 | GOOGLE_CLOUD_STORAGE_CREDENTIAL | The credential key file path when `STORAGE_TYPE` is `gcs` | String | |
 | GOOGLE_CLOUD_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `gcs` | String | |
 | GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS | Enable uniform bucket level access when `STORAGE_TYPE` is `gcs` | Boolean | true |
 | SHOW_COMMUNITY_NODES | Show nodes created by community | Boolean | |
 | DISABLED_NODES | Hide nodes from UI (comma separated list of node names) | String | |
+| TRUST_PROXY | Configure proxy trust settings for proper IP detection. Values: 'true' (trust all), 'false' (disable), number (hop count), or Express proxy values (e.g., 'loopback', 'linklocal', 'uniquelocal', IP addresses). [Learn More](https://expressjs.com/en/guide/behind-proxies.html) | Boolean/String/Number | true |
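Since `TRUST_PROXY` maps onto Express's `trust proxy` setting (per the linked guide), a minimal sketch of how such a value can be wired in; the exact Flowise wiring is not shown in this diff, so treat this as an assumption:

```ts
import express from 'express'

const app = express()

// Sketch: translate a TRUST_PROXY-style env value into Express's `trust proxy` setting.
// 'true'/'false' become booleans, a numeric string becomes a hop count,
// anything else (e.g. 'loopback' or an IP list) is passed through as-is.
const raw = process.env.TRUST_PROXY ?? 'true'
const value = raw === 'true' ? true : raw === 'false' ? false : isNaN(Number(raw)) ? raw : Number(raw)
app.set('trust proxy', value)
```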
You can also specify the env variables when using `npx`. For example:
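The example itself did not survive extraction; based on Flowise's documented CLI flags, it plausibly reads (treat the exact flags as an assumption):

```
npx flowise start --PORT=3000 --DEBUG=true
```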
Dockerfile
@@ -5,34 +5,41 @@
 # docker run -d -p 3000:3000 flowise

 FROM node:20-alpine
-RUN apk add --update libc6-compat python3 make g++
-# needed for pdfjs-dist
-RUN apk add --no-cache build-base cairo-dev pango-dev
-
-# Install Chromium
-RUN apk add --no-cache chromium
-
-# Install curl for container-level health checks
-# Fixes: https://github.com/FlowiseAI/Flowise/issues/4126
-RUN apk add --no-cache curl
-
-#install PNPM globaly
-RUN npm install -g pnpm
+# Install system dependencies and build tools
+RUN apk update && \
+    apk add --no-cache \
+    libc6-compat \
+    python3 \
+    make \
+    g++ \
+    build-base \
+    cairo-dev \
+    pango-dev \
+    chromium \
+    curl && \
+    npm install -g pnpm

 ENV PUPPETEER_SKIP_DOWNLOAD=true
 ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser

 ENV NODE_OPTIONS=--max-old-space-size=8192

-WORKDIR /usr/src
+WORKDIR /usr/src/flowise

 # Copy app source
 COPY . .

-RUN pnpm install
-
-RUN pnpm build
+# Install dependencies and build
+RUN pnpm install && \
+    pnpm build
+
+# Give the node user ownership of the application files
+RUN chown -R node:node .
+
+# Switch to non-root user (node user already exists in node:20-alpine)
+USER node

 EXPOSE 3000

 CMD [ "pnpm", "start" ]
@@ -190,6 +190,10 @@ Deploy Flowise self-hosted in your existing infrastructure, we support various [

 [](https://railway.app/template/pn4G8S?referralCode=WVNPD9)

+- [Northflank](https://northflank.com/stacks/deploy-flowiseai)
+
+    [](https://northflank.com/stacks/deploy-flowiseai)
+
 - [Render](https://docs.flowiseai.com/configuration/deployment/render)

     [](https://docs.flowiseai.com/configuration/deployment/render)
SECURITY.md
@@ -1,40 +1,38 @@
 ### Responsible Disclosure Policy

 At Flowise, we prioritize security and continuously work to safeguard our systems. However, vulnerabilities can still exist. If you identify a security issue, please report it to us so we can address it promptly. Your cooperation helps us better protect our platform and users.

-### Vulnerabilities
+### Out of scope vulnerabilities

-The following types of issues are some of the most common vulnerabilities:
 - Clickjacking on pages without sensitive actions
 - CSRF on unauthenticated/logout/login pages
 - Attacks requiring MITM (Man-in-the-Middle) or physical device access
 - Social engineering attacks
 - Activities that cause service disruption (DoS)
 - Content spoofing and text injection without a valid attack vector
 - Email spoofing
 - Absence of DNSSEC, CAA, CSP headers
 - Missing Secure or HTTP-only flag on non-sensitive cookies
 - Dead links
 - User enumeration

 ### Reporting Guidelines

 - Submit your findings to https://github.com/FlowiseAI/Flowise/security
 - Provide clear details to help us reproduce and fix the issue quickly.

 ### Disclosure Guidelines

 - Do not publicly disclose vulnerabilities until we have assessed, resolved, and notified affected users.
 - If you plan to present your research (e.g., at a conference or in a blog), share a draft with us at least **30 days in advance** for review.
 - Avoid including:
   - Data from any Flowise customer projects
   - Flowise user/customer information
   - Details about Flowise employees, contractors, or partners

 ### Response to Reports

 - We will acknowledge your report within **5 business days** and provide an estimated resolution timeline.
 - Your report will be kept **confidential**, and your details will not be shared without your consent.

 We appreciate your efforts in helping us maintain a secure platform and look forward to working together to resolve any issues responsibly.
@@ -14,6 +14,7 @@ DATABASE_PATH=/root/.flowise
 # DATABASE_USER=root
 # DATABASE_PASSWORD=mypassword
 # DATABASE_SSL=true
+# DATABASE_REJECT_UNAUTHORIZED=true
 # DATABASE_SSL_KEY_BASE64=<Self signed certificate in BASE64>

@@ -37,8 +38,11 @@ SECRETKEY_PATH=/root/.flowise
 # DEBUG=true
 LOG_PATH=/root/.flowise/logs
 # LOG_LEVEL=info #(error | warn | info | verbose | debug)
+# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials
+# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie
 # TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
 # TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
+# ALLOW_BUILTIN_DEP=false

 ############################################################################################################

@@ -97,6 +101,7 @@ JWT_TOKEN_EXPIRY_IN_MINUTES=360
 JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
 # EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart)
 # EXPRESS_SESSION_SECRET=flowise
+# SECURE_COOKIES=

 # INVITE_TOKEN_EXPIRY_IN_HOURS=24
 # PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15

@@ -162,4 +167,14 @@ JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
 # REDIS_KEY=
 # REDIS_CA=
 # REDIS_KEEP_ALIVE=
 # ENABLE_BULLMQ_DASHBOARD=
+
+############################################################################################################
+############################################## SECURITY ####################################################
+############################################################################################################
+
+# HTTP_DENY_LIST=
+# CUSTOM_MCP_SECURITY_CHECK=true
+# CUSTOM_MCP_PROTOCOL=sse #(stdio | sse)
+# TRUST_PROXY=true #(true | false | 1 | loopback | linklocal | uniquelocal | IP addresses | loopback, IP addresses)
@@ -46,10 +46,13 @@ services:
             - DEBUG=${DEBUG}
             - LOG_PATH=${LOG_PATH}
             - LOG_LEVEL=${LOG_LEVEL}
+            - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
+            - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

-            # CUSTOM TOOL DEPENDENCIES
+            # CUSTOM TOOL/FUNCTION DEPENDENCIES
             - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}
             - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP}
+            - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP}

             # STORAGE
             - STORAGE_TYPE=${STORAGE_TYPE}

@@ -88,6 +91,7 @@ services:
             - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
             - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
             - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
+            - SECURE_COOKIES=${SECURE_COOKIES}

             # EMAIL
             - SMTP_HOST=${SMTP_HOST}

@@ -138,6 +142,12 @@ services:
             - REDIS_CA=${REDIS_CA}
             - REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE}
             - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD}
+
+            # SECURITY
+            - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK}
+            - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL}
+            - HTTP_DENY_LIST=${HTTP_DENY_LIST}
+            - TRUST_PROXY=${TRUST_PROXY}
         healthcheck:
             test: ['CMD', 'curl', '-f', 'http://localhost:${PORT:-3000}/api/v1/ping']
             interval: 10s

@@ -182,10 +192,13 @@ services:
             - DEBUG=${DEBUG}
             - LOG_PATH=${LOG_PATH}
             - LOG_LEVEL=${LOG_LEVEL}
+            - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
+            - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

-            # CUSTOM TOOL DEPENDENCIES
+            # CUSTOM TOOL/FUNCTION DEPENDENCIES
             - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}
             - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP}
+            - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP}

             # STORAGE
             - STORAGE_TYPE=${STORAGE_TYPE}

@@ -224,6 +237,7 @@ services:
             - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
             - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
             - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
+            - SECURE_COOKIES=${SECURE_COOKIES}

             # EMAIL
             - SMTP_HOST=${SMTP_HOST}

@@ -274,6 +288,12 @@ services:
             - REDIS_CA=${REDIS_CA}
             - REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE}
             - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD}
+
+            # SECURITY
+            - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK}
+            - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL}
+            - HTTP_DENY_LIST=${HTTP_DENY_LIST}
+            - TRUST_PROXY=${TRUST_PROXY}
         healthcheck:
             test: ['CMD', 'curl', '-f', 'http://localhost:${WORKER_PORT:-5566}/healthz']
             interval: 10s
@@ -31,10 +31,13 @@ services:
             - DEBUG=${DEBUG}
             - LOG_PATH=${LOG_PATH}
             - LOG_LEVEL=${LOG_LEVEL}
+            - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
+            - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

-            # CUSTOM TOOL DEPENDENCIES
+            # CUSTOM TOOL/FUNCTION DEPENDENCIES
             - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}
             - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP}
+            - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP}

             # STORAGE
             - STORAGE_TYPE=${STORAGE_TYPE}

@@ -73,6 +76,7 @@ services:
             - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
             - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
             - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
+            - SECURE_COOKIES=${SECURE_COOKIES}

             # EMAIL
             - SMTP_HOST=${SMTP_HOST}

@@ -123,6 +127,12 @@ services:
             - REDIS_CA=${REDIS_CA}
             - REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE}
             - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD}
+
+            # SECURITY
+            - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK}
+            - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL}
+            - HTTP_DENY_LIST=${HTTP_DENY_LIST}
+            - TRUST_PROXY=${TRUST_PROXY}
         ports:
             - '${PORT}:${PORT}'
         healthcheck:
@@ -14,6 +14,7 @@ DATABASE_PATH=/root/.flowise
 # DATABASE_USER=root
 # DATABASE_PASSWORD=mypassword
 # DATABASE_SSL=true
+# DATABASE_REJECT_UNAUTHORIZED=true
 # DATABASE_SSL_KEY_BASE64=<Self signed certificate in BASE64>

@@ -37,8 +38,11 @@ SECRETKEY_PATH=/root/.flowise
 # DEBUG=true
 LOG_PATH=/root/.flowise/logs
 # LOG_LEVEL=info #(error | warn | info | verbose | debug)
+# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials
+# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie
 # TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
 # TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
+# ALLOW_BUILTIN_DEP=false

 ############################################################################################################

@@ -97,6 +101,7 @@ JWT_TOKEN_EXPIRY_IN_MINUTES=360
 JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
 # EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart)
 # EXPRESS_SESSION_SECRET=flowise
+# SECURE_COOKIES=

 # INVITE_TOKEN_EXPIRY_IN_HOURS=24
 # PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15

@@ -162,4 +167,14 @@ JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
 # REDIS_KEY=
 # REDIS_CA=
 # REDIS_KEEP_ALIVE=
 # ENABLE_BULLMQ_DASHBOARD=
+
+############################################################################################################
+############################################## SECURITY ####################################################
+############################################################################################################
+
+# HTTP_DENY_LIST=
+# CUSTOM_MCP_SECURITY_CHECK=true
+# CUSTOM_MCP_PROTOCOL=sse #(stdio | sse)
+# TRUST_PROXY=true #(true | false | 1 | loopback | linklocal | uniquelocal | IP addresses | loopback, IP addresses)
@@ -7,7 +7,7 @@ RUN apk add --no-cache build-base cairo-dev pango-dev
 # Install Chromium and curl for container-level health checks
 RUN apk add --no-cache chromium curl

-#install PNPM globaly
+#install PNPM globally
 RUN npm install -g pnpm

 ENV PUPPETEER_SKIP_DOWNLOAD=true
@@ -31,10 +31,13 @@ services:
             - DEBUG=${DEBUG}
             - LOG_PATH=${LOG_PATH}
             - LOG_LEVEL=${LOG_LEVEL}
+            - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
+            - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

-            # CUSTOM TOOL DEPENDENCIES
+            # CUSTOM TOOL/FUNCTION DEPENDENCIES
             - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}
             - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP}
+            - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP}

             # STORAGE
             - STORAGE_TYPE=${STORAGE_TYPE}

@@ -73,6 +76,7 @@ services:
             - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
             - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
             - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
+            - SECURE_COOKIES=${SECURE_COOKIES}

             # EMAIL
             - SMTP_HOST=${SMTP_HOST}

@@ -123,6 +127,12 @@ services:
             - REDIS_CA=${REDIS_CA}
             - REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE}
             - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD}
+
+            # SECURITY
+            - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK}
+            - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL}
+            - HTTP_DENY_LIST=${HTTP_DENY_LIST}
+            - TRUST_PROXY=${TRUST_PROXY}
         ports:
             - '${WORKER_PORT}:${WORKER_PORT}'
         healthcheck:
@@ -112,7 +112,7 @@ Flowise has 3 different modules in a single mono repository.
    pnpm start
    ```

-11. Commit code and submit a Pull Request from your forked branch pointing to the [Flowise main branch](https://github.com/FlowiseAI/Flowise/tree/master).
+11. Commit code and submit a Pull Request from your forked branch pointing to the [Flowise main branch](https://github.com/FlowiseAI/Flowise/tree/main).

 ## 🌱 Env Variables
@@ -1,6 +1,7 @@
 version: "2"
 services:
     otel-collector:
+        read_only: true
         image: otel/opentelemetry-collector-contrib
         command: ["--config=/etc/otelcol-contrib/config.yaml", "--feature-gates=-exporter.datadogexporter.DisableAPMStats", "${OTELCOL_ARGS}"]
         volumes:
@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "3.0.7",
+    "version": "3.0.11",
     "private": true,
     "homepage": "https://flowiseai.com",
     "workspaces": [
@@ -51,7 +51,7 @@
     "eslint-plugin-react-hooks": "^4.6.0",
     "eslint-plugin-unused-imports": "^2.0.0",
     "husky": "^8.0.1",
-    "kill-port": "^2.0.1",
+    "kill-port": "2.0.1",
     "lint-staged": "^13.0.3",
     "prettier": "^2.7.1",
     "pretty-quick": "^3.1.3",
@@ -0,0 +1,26 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class ElevenLabsApi implements INodeCredential {
    label: string
    name: string
    version: number
    description: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'Eleven Labs API'
        this.name = 'elevenLabsApi'
        this.version = 1.0
        this.description =
            'Sign up for an Eleven Labs account and <a target="_blank" href="https://elevenlabs.io/app/settings/api-keys">create an API Key</a>.'
        this.inputs = [
            {
                label: 'Eleven Labs API Key',
                name: 'elevenLabsApiKey',
                type: 'password'
            }
        ]
    }
}

module.exports = { credClass: ElevenLabsApi }
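Credential classes like this only declare the fields; a node consumes them at runtime through the shared utils. A hedged sketch of the conventional Flowise pattern (not code from this diff; helper signatures are assumed from their use elsewhere in the compare):

```ts
import { getCredentialData, getCredentialParam } from '../../../src/utils'
import { ICommonObject, INodeData } from '../../../src/Interface'

// Sketch: resolve the API key declared by the ElevenLabsApi credential class
// inside a node's run() method (nodeData/options come from the node runtime).
async function resolveElevenLabsKey(nodeData: INodeData, options: ICommonObject): Promise<string> {
    const credentialData = await getCredentialData(nodeData.credential ?? '', options)
    return getCredentialParam('elevenLabsApiKey', credentialData, nodeData)
}
```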
@@ -0,0 +1,26 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class TeradataBearerTokenCredential implements INodeCredential {
    label: string
    name: string
    description: string
    version: number
    inputs: INodeParams[]

    constructor() {
        this.label = 'Teradata Bearer Token'
        this.name = 'teradataBearerToken'
        this.version = 1.0
        this.description =
            'Refer to <a target="_blank" href="https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-Vector-Store-User-Guide/Setting-up-Vector-Store/Importing-Modules-Required-for-Vector-Store">official guide</a> on how to get Teradata Bearer Token'
        this.inputs = [
            {
                label: 'Token',
                name: 'token',
                type: 'password'
            }
        ]
    }
}

module.exports = { credClass: TeradataBearerTokenCredential }
@@ -0,0 +1,28 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class TeradataTD2Credential implements INodeCredential {
    label: string
    name: string
    version: number
    inputs: INodeParams[]

    constructor() {
        this.label = 'Teradata TD2 Auth'
        this.name = 'teradataTD2Auth'
        this.version = 1.0
        this.inputs = [
            {
                label: 'Teradata TD2 Auth Username',
                name: 'tdUsername',
                type: 'string'
            },
            {
                label: 'Teradata TD2 Auth Password',
                name: 'tdPassword',
                type: 'password'
            }
        ]
    }
}

module.exports = { credClass: TeradataTD2Credential }
@@ -0,0 +1,47 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class TeradataVectorStoreApiCredentials implements INodeCredential {
    label: string
    name: string
    version: number
    inputs: INodeParams[]

    constructor() {
        this.label = 'Teradata Vector Store API Credentials'
        this.name = 'teradataVectorStoreApiCredentials'
        this.version = 1.0
        this.inputs = [
            {
                label: 'Teradata Host IP',
                name: 'tdHostIp',
                type: 'string'
            },
            {
                label: 'Username',
                name: 'tdUsername',
                type: 'string'
            },
            {
                label: 'Password',
                name: 'tdPassword',
                type: 'password'
            },
            {
                label: 'Vector_Store_Base_URL',
                name: 'baseURL',
                description: 'Teradata Vector Store Base URL',
                placeholder: `Base_URL`,
                type: 'string'
            },
            {
                label: 'JWT Token',
                name: 'jwtToken',
                type: 'password',
                description: 'Bearer token for JWT authentication',
                optional: true
            }
        ]
    }
}

module.exports = { credClass: TeradataVectorStoreApiCredentials }
@@ -3,6 +3,27 @@
 {
     "name": "awsChatBedrock",
     "models": [
+        {
+            "label": "anthropic.claude-opus-4-5-20251101-v1:0",
+            "name": "anthropic.claude-opus-4-5-20251101-v1:0",
+            "description": "Claude 4.5 Opus",
+            "input_cost": 0.000005,
+            "output_cost": 0.000025
+        },
+        {
+            "label": "anthropic.claude-sonnet-4-5-20250929-v1:0",
+            "name": "anthropic.claude-sonnet-4-5-20250929-v1:0",
+            "description": "Claude 4.5 Sonnet",
+            "input_cost": 0.000003,
+            "output_cost": 0.000015
+        },
+        {
+            "label": "anthropic.claude-haiku-4-5-20251001-v1:0",
+            "name": "anthropic.claude-haiku-4-5-20251001-v1:0",
+            "description": "Claude 4.5 Haiku",
+            "input_cost": 0.000001,
+            "output_cost": 0.000005
+        },
         {
             "label": "openai.gpt-oss-20b-1:0",
             "name": "openai.gpt-oss-20b-1:0",
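The `input_cost`/`output_cost` fields read as USD per token (so 0.000005 for Opus 4.5 input is $5 per million input tokens); assuming that interpretation, a quick sanity check:

```ts
// Assumption: input_cost/output_cost are USD per token.
const inputCost = 0.000005 // Claude 4.5 Opus input
const outputCost = 0.000025 // Claude 4.5 Opus output

// Estimated spend for 1M input tokens plus 200k output tokens:
const estimate = 1_000_000 * inputCost + 200_000 * outputCost
console.log(estimate) // 5 + 5 = 10 (USD)
```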
@@ -301,6 +322,12 @@
 {
     "name": "azureChatOpenAI",
     "models": [
+        {
+            "label": "gpt-5.1",
+            "name": "gpt-5.1",
+            "input_cost": 0.00000125,
+            "output_cost": 0.00001
+        },
         {
             "label": "gpt-5",
             "name": "gpt-5",
@@ -486,11 +513,25 @@
     "name": "chatAnthropic",
     "models": [
         {
-            "label": "claude-opus-4-1",
-            "name": "claude-opus-4-1",
-            "description": "Claude 4.1 Opus",
-            "input_cost": 0.000015,
-            "output_cost": 0.000075
+            "label": "claude-opus-4-5",
+            "name": "claude-opus-4-5",
+            "description": "Claude 4.5 Opus",
+            "input_cost": 0.000005,
+            "output_cost": 0.000025
+        },
+        {
+            "label": "claude-sonnet-4-5",
+            "name": "claude-sonnet-4-5",
+            "description": "Claude 4.5 Sonnet",
+            "input_cost": 0.000003,
+            "output_cost": 0.000015
+        },
+        {
+            "label": "claude-haiku-4-5",
+            "name": "claude-haiku-4-5",
+            "description": "Claude 4.5 Haiku",
+            "input_cost": 0.000001,
+            "output_cost": 0.000005
         },
         {
             "label": "claude-sonnet-4-0",
@@ -499,6 +540,13 @@
             "input_cost": 0.000003,
             "output_cost": 0.000015
         },
+        {
+            "label": "claude-opus-4-1",
+            "name": "claude-opus-4-1",
+            "description": "Claude 4.1 Opus",
+            "input_cost": 0.000015,
+            "output_cost": 0.000075
+        },
         {
             "label": "claude-opus-4-0",
             "name": "claude-opus-4-0",
@@ -593,6 +641,18 @@
 {
     "name": "chatGoogleGenerativeAI",
     "models": [
+        {
+            "label": "gemini-3-pro-preview",
+            "name": "gemini-3-pro-preview",
+            "input_cost": 0.00002,
+            "output_cost": 0.00012
+        },
+        {
+            "label": "gemini-3-pro-image-preview",
+            "name": "gemini-3-pro-image-preview",
+            "input_cost": 0.00002,
+            "output_cost": 0.00012
+        },
         {
             "label": "gemini-2.5-pro",
             "name": "gemini-2.5-pro",
@@ -605,6 +665,12 @@
             "input_cost": 1.25e-6,
             "output_cost": 0.00001
         },
+        {
+            "label": "gemini-2.5-flash-image",
+            "name": "gemini-2.5-flash-image",
+            "input_cost": 1.25e-6,
+            "output_cost": 0.00001
+        },
         {
             "label": "gemini-2.5-flash-lite",
             "name": "gemini-2.5-flash-lite",
@@ -657,6 +723,12 @@
 {
     "name": "chatGoogleVertexAI",
     "models": [
+        {
+            "label": "gemini-3-pro-preview",
+            "name": "gemini-3-pro-preview",
+            "input_cost": 0.00002,
+            "output_cost": 0.00012
+        },
         {
             "label": "gemini-2.5-pro",
             "name": "gemini-2.5-pro",
@@ -723,6 +795,27 @@
             "input_cost": 1.25e-7,
             "output_cost": 3.75e-7
         },
+        {
+            "label": "claude-opus-4-5@20251101",
+            "name": "claude-opus-4-5@20251101",
+            "description": "Claude 4.5 Opus",
+            "input_cost": 0.000005,
+            "output_cost": 0.000025
+        },
+        {
+            "label": "claude-sonnet-4-5@20250929",
+            "name": "claude-sonnet-4-5@20250929",
+            "description": "Claude 4.5 Sonnet",
+            "input_cost": 0.000003,
+            "output_cost": 0.000015
+        },
+        {
+            "label": "claude-haiku-4-5@20251001",
+            "name": "claude-haiku-4-5@20251001",
+            "description": "Claude 4.5 Haiku",
+            "input_cost": 0.000001,
+            "output_cost": 0.000005
+        },
         {
             "label": "claude-opus-4-1@20250805",
             "name": "claude-opus-4-1@20250805",
@@ -954,6 +1047,12 @@
 {
     "name": "chatOpenAI",
     "models": [
+        {
+            "label": "gpt-5.1",
+            "name": "gpt-5.1",
+            "input_cost": 0.00000125,
+            "output_cost": 0.00001
+        },
         {
             "label": "gpt-5",
             "name": "gpt-5",
(File diff suppressed because it is too large.)
@@ -317,7 +317,7 @@ class Condition_Agentflow implements INode {
         }
     }

-    // If no condition is fullfilled, add isFulfilled to the ELSE condition
+    // If no condition is fulfilled, add isFulfilled to the ELSE condition
     const dummyElseConditionData = {
         type: 'string',
         value1: '',
@@ -60,7 +60,7 @@ class CustomFunction_Agentflow implements INode {
     constructor() {
         this.label = 'Custom Function'
         this.name = 'customFunctionAgentflow'
-        this.version = 1.0
+        this.version = 1.1
         this.type = 'CustomFunction'
         this.category = 'Agent Flows'
         this.description = 'Execute custom function'
@@ -107,8 +107,7 @@ class CustomFunction_Agentflow implements INode {
                 label: 'Key',
                 name: 'key',
                 type: 'asyncOptions',
-                loadMethod: 'listRuntimeStateKeys',
-                freeSolo: true
+                loadMethod: 'listRuntimeStateKeys'
             },
             {
                 label: 'Value',
@@ -134,7 +133,7 @@ class CustomFunction_Agentflow implements INode {

     async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
         const javascriptFunction = nodeData.inputs?.customFunctionJavascriptFunction as string
-        const functionInputVariables = nodeData.inputs?.customFunctionInputVariables as ICustomFunctionInputVariables[]
+        const functionInputVariables = (nodeData.inputs?.customFunctionInputVariables as ICustomFunctionInputVariables[]) ?? []
         const _customFunctionUpdateState = nodeData.inputs?.customFunctionUpdateState

         const state = options.agentflowRuntime?.state as ICommonObject
@@ -147,11 +146,17 @@ class CustomFunction_Agentflow implements INode {

         const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
         const flow = {
+            input,
+            state,
             chatflowId: options.chatflowid,
             sessionId: options.sessionId,
             chatId: options.chatId,
-            input,
-            state
+            rawOutput: options.postProcessing?.rawOutput || '',
+            chatHistory: options.postProcessing?.chatHistory || [],
+            sourceDocuments: options.postProcessing?.sourceDocuments,
+            usedTools: options.postProcessing?.usedTools,
+            artifacts: options.postProcessing?.artifacts,
+            fileAnnotations: options.postProcessing?.fileAnnotations
         }

         // Create additional sandbox variables for custom function inputs
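With `rawOutput` and the other post-processing fields now on the `flow` object, a custom-function body can read them directly. A hedged sketch, assuming the sandbox exposes this object as `$flow` (the usual Flowise custom-function convention; the exact injected shape is an assumption):

```ts
// Hypothetical body of a Custom Function node, executed inside the sandbox.
declare const $flow: {
    chatId: string
    rawOutput: string
    chatHistory: Array<{ role: string; content: string }>
}

const turns = $flow.chatHistory.length
return `chat ${$flow.chatId} (${turns} turns): ${$flow.rawOutput || 'no raw output yet'}`
```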
@@ -175,8 +180,7 @@ class CustomFunction_Agentflow implements INode {
         try {
             const response = await executeJavaScriptCode(javascriptFunction, sandbox, {
                 libraries: ['axios'],
-                streamOutput,
-                timeout: 10000
+                streamOutput
             })

             let finalOutput = response
@@ -8,8 +8,7 @@ import {
     IServerSideEventStreamer
 } from '../../../src/Interface'
 import axios, { AxiosRequestConfig } from 'axios'
-import { getCredentialData, getCredentialParam, processTemplateVariables } from '../../../src/utils'
-import JSON5 from 'json5'
+import { getCredentialData, getCredentialParam, processTemplateVariables, parseJsonBody } from '../../../src/utils'
 import { DataSource } from 'typeorm'
 import { BaseMessageLike } from '@langchain/core/messages'
 import { updateFlowState } from '../utils'
@@ -31,7 +30,7 @@ class ExecuteFlow_Agentflow implements INode {
     constructor() {
         this.label = 'Execute Flow'
         this.name = 'executeFlowAgentflow'
-        this.version = 1.1
+        this.version = 1.2
         this.type = 'ExecuteFlow'
         this.category = 'Agent Flows'
         this.description = 'Execute another flow'
@@ -103,8 +102,7 @@ class ExecuteFlow_Agentflow implements INode {
                 label: 'Key',
                 name: 'key',
                 type: 'asyncOptions',
-                loadMethod: 'listRuntimeStateKeys',
-                freeSolo: true
+                loadMethod: 'listRuntimeStateKeys'
             },
             {
                 label: 'Value',
@@ -168,7 +166,7 @@ class ExecuteFlow_Agentflow implements INode {
         let overrideConfig = nodeData.inputs?.executeFlowOverrideConfig
         if (typeof overrideConfig === 'string' && overrideConfig.startsWith('{') && overrideConfig.endsWith('}')) {
             try {
-                overrideConfig = JSON5.parse(overrideConfig)
+                overrideConfig = parseJsonBody(overrideConfig)
             } catch (parseError) {
                 throw new Error(`Invalid JSON in executeFlowOverrideConfig: ${parseError.message}`)
             }
@@ -2,9 +2,8 @@ import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { AxiosRequestConfig, Method, ResponseType } from 'axios'
 import FormData from 'form-data'
 import * as querystring from 'querystring'
-import { getCredentialData, getCredentialParam } from '../../../src/utils'
+import { getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
 import { secureAxiosRequest } from '../../../src/httpSecurity'
-import JSON5 from 'json5'

 class HTTP_Agentflow implements INode {
     label: string
@@ -20,16 +19,6 @@ class HTTP_Agentflow implements INode {
     credential: INodeParams
     inputs: INodeParams[]

-    private parseJsonBody(body: string): any {
-        try {
-            return JSON5.parse(body)
-        } catch (error) {
-            throw new Error(
-                `Invalid JSON format in body. Original error: ${error.message}. Please ensure your JSON is properly formatted with quoted strings and valid escape sequences.`
-            )
-        }
-    }
-
     constructor() {
         this.label = 'HTTP'
         this.name = 'httpAgentflow'
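Several nodes in this compare (ExecuteFlow, HTTP, Iteration) now import `parseJsonBody` from `src/utils` instead of each carrying a private helper. The shared util itself is not part of this diff; assuming it simply centralizes the removed method above, it would look roughly like:

```ts
// Sketch of the shared helper, mirroring the removed private method.
// Assumption: packages/components/src/utils keeps the same JSON5-based behavior.
import JSON5 from 'json5'

export const parseJsonBody = (body: string): any => {
    try {
        // JSON5 tolerates unquoted keys, trailing commas, single quotes, etc.
        return JSON5.parse(body)
    } catch (error) {
        throw new Error(
            `Invalid JSON format in body. Original error: ${error.message}. ` +
                `Please ensure your JSON is properly formatted with quoted strings and valid escape sequences.`
        )
    }
}
```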
@@ -78,7 +67,8 @@ class HTTP_Agentflow implements INode {
             {
                 label: 'URL',
                 name: 'url',
-                type: 'string'
+                type: 'string',
+                acceptVariable: true
             },
             {
                 label: 'Headers',
@@ -285,7 +275,7 @@ class HTTP_Agentflow implements INode {
         if (method !== 'GET' && body) {
             switch (bodyType) {
                 case 'json': {
-                    requestConfig.data = typeof body === 'string' ? this.parseJsonBody(body) : body
+                    requestConfig.data = typeof body === 'string' ? parseJsonBody(body) : body
                     requestHeaders['Content-Type'] = 'application/json'
                     break
                 }
@@ -303,7 +293,7 @@ class HTTP_Agentflow implements INode {
                     break
                 }
                 case 'xWwwFormUrlencoded':
-                    requestConfig.data = querystring.stringify(typeof body === 'string' ? this.parseJsonBody(body) : body)
+                    requestConfig.data = querystring.stringify(typeof body === 'string' ? parseJsonBody(body) : body)
                     requestHeaders['Content-Type'] = 'application/x-www-form-urlencoded'
                     break
             }
@@ -241,8 +241,11 @@ class HumanInput_Agentflow implements INode {
         if (isStreamable) {
             const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
             for await (const chunk of await llmNodeInstance.stream(messages)) {
-                sseStreamer.streamTokenEvent(chatId, chunk.content.toString())
-                response = response.concat(chunk)
+                const content = typeof chunk === 'string' ? chunk : chunk.content.toString()
+                sseStreamer.streamTokenEvent(chatId, content)
+
+                const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
+                response = response.concat(messageChunk)
             }
             humanInputDescription = response.content as string
         } else {
@@ -1,5 +1,5 @@
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
-import JSON5 from 'json5'
+import { parseJsonBody } from '../../../src/utils'

 class Iteration_Agentflow implements INode {
     label: string
@@ -42,10 +42,10 @@ class Iteration_Agentflow implements INode {
         // Helper function to clean JSON strings with redundant backslashes
         const safeParseJson = (str: string): string => {
             try {
-                return JSON5.parse(str)
+                return parseJsonBody(str)
             } catch {
                 // Try parsing after cleaning
-                return JSON5.parse(str.replace(/\\(["'[\]{}])/g, '$1'))
+                return parseJsonBody(str.replace(/\\(["'[\]{}])/g, '$1'))
             }
         }
@@ -2,17 +2,20 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ICommonObject, IMessage, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
 import { AIMessageChunk, BaseMessageLike, MessageContentText } from '@langchain/core/messages'
 import { DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt'
-import { z } from 'zod'
 import { AnalyticHandler } from '../../../src/handler'
-import { ILLMMessage, IStructuredOutput } from '../Interface.Agentflow'
+import { ILLMMessage } from '../Interface.Agentflow'
 import {
+    addImageArtifactsToMessages,
+    extractArtifactsFromResponse,
     getPastChatHistoryImageMessages,
     getUniqueImageMessages,
     processMessagesWithImages,
     replaceBase64ImagesWithFileReferences,
+    replaceInlineDataWithFileReferences,
     updateFlowState
 } from '../utils'
-import { processTemplateVariables } from '../../../src/utils'
+import { processTemplateVariables, configureStructuredOutput } from '../../../src/utils'
 import { flatten } from 'lodash'

 class LLM_Agentflow implements INode {
     label: string
@@ -31,7 +34,7 @@ class LLM_Agentflow implements INode {
     constructor() {
         this.label = 'LLM'
         this.name = 'llmAgentflow'
-        this.version = 1.0
+        this.version = 1.1
         this.type = 'LLM'
         this.category = 'Agent Flows'
         this.description = 'Large language models to analyze user-provided inputs and generate responses'
@@ -287,8 +290,7 @@ class LLM_Agentflow implements INode {
                 label: 'Key',
                 name: 'key',
                 type: 'asyncOptions',
-                loadMethod: 'listRuntimeStateKeys',
-                freeSolo: true
+                loadMethod: 'listRuntimeStateKeys'
             },
             {
                 label: 'Value',
@@ -448,10 +450,16 @@ class LLM_Agentflow implements INode {
         }
         delete nodeData.inputs?.llmMessages

+        /**
+         * Add image artifacts from previous assistant responses as user messages
+         * Images are converted from FILE-STORAGE::<image_path> to base64 image_url format
+         */
+        await addImageArtifactsToMessages(messages, options)
+
         // Configure structured output if specified
         const isStructuredOutput = _llmStructuredOutput && Array.isArray(_llmStructuredOutput) && _llmStructuredOutput.length > 0
         if (isStructuredOutput) {
-            llmNodeInstance = this.configureStructuredOutput(llmNodeInstance, _llmStructuredOutput)
+            llmNodeInstance = configureStructuredOutput(llmNodeInstance, _llmStructuredOutput)
         }

         // Initialize response and determine if streaming is possible
@@ -467,9 +475,11 @@ class LLM_Agentflow implements INode {

             // Track execution time
             const startTime = Date.now()

+            const sseStreamer: IServerSideEventStreamer | undefined = options.sseStreamer
+
             /*
              * Invoke LLM
              */
             if (isStreamable) {
                 response = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController)
             } else {
@@ -494,6 +504,40 @@ class LLM_Agentflow implements INode {
             const endTime = Date.now()
             const timeDelta = endTime - startTime

+            // Extract artifacts and file annotations from response metadata
+            let artifacts: any[] = []
+            let fileAnnotations: any[] = []
+            if (response.response_metadata) {
+                const {
+                    artifacts: extractedArtifacts,
+                    fileAnnotations: extractedFileAnnotations,
+                    savedInlineImages
+                } = await extractArtifactsFromResponse(response.response_metadata, newNodeData, options)
+
+                if (extractedArtifacts.length > 0) {
+                    artifacts = extractedArtifacts
+
+                    // Stream artifacts if this is the last node
+                    if (isLastNode && sseStreamer) {
+                        sseStreamer.streamArtifactsEvent(chatId, artifacts)
+                    }
+                }
+
+                if (extractedFileAnnotations.length > 0) {
+                    fileAnnotations = extractedFileAnnotations
+
+                    // Stream file annotations if this is the last node
+                    if (isLastNode && sseStreamer) {
+                        sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
+                    }
+                }
+
+                // Replace inlineData base64 with file references in the response
+                if (savedInlineImages && savedInlineImages.length > 0) {
+                    replaceInlineDataWithFileReferences(response, savedInlineImages)
+                }
+            }
+
             // Update flow state if needed
             let newState = { ...state }
             if (_llmUpdateState && Array.isArray(_llmUpdateState) && _llmUpdateState.length > 0) {
@@ -513,10 +557,22 @@ class LLM_Agentflow implements INode {
                 finalResponse = response.content.map((item: any) => item.text).join('\n')
             } else if (response.content && typeof response.content === 'string') {
                 finalResponse = response.content
+            } else if (response.content === '') {
+                // Empty response content, this could happen when there is only image data
+                finalResponse = ''
             } else {
                 finalResponse = JSON.stringify(response, null, 2)
             }
-            const output = this.prepareOutputObject(response, finalResponse, startTime, endTime, timeDelta, isStructuredOutput)
+            const output = this.prepareOutputObject(
+                response,
+                finalResponse,
+                startTime,
+                endTime,
+                timeDelta,
+                isStructuredOutput,
+                artifacts,
+                fileAnnotations
+            )

             // End analytics tracking
             if (analyticHandlers && llmIds) {
@@ -528,12 +584,23 @@ class LLM_Agentflow implements INode {
                 this.sendStreamingEvents(options, chatId, response)
             }

+            // Stream file annotations if any were extracted
+            if (fileAnnotations.length > 0 && isLastNode && sseStreamer) {
+                sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
+            }
+
             // Process template variables in state
             newState = processTemplateVariables(newState, finalResponse)

+            /**
+             * Remove the temporarily added image artifact messages before storing
+             * This is to avoid storing the actual base64 data into database
+             */
+            const messagesToStore = messages.filter((msg: any) => !msg._isTemporaryImageMessage)
+
             // Replace the actual messages array with one that includes the file references for images instead of base64 data
             const messagesWithFileReferences = replaceBase64ImagesWithFileReferences(
-                messages,
+                messagesToStore,
                 runtimeImageMessagesWithFileRef,
                 pastImageMessagesWithFileRef
             )
@@ -584,7 +651,13 @@ class LLM_Agentflow implements INode {
                 {
                     role: returnRole,
                     content: finalResponse,
-                    name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id
+                    name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id,
+                    ...(((artifacts && artifacts.length > 0) || (fileAnnotations && fileAnnotations.length > 0)) && {
+                        additional_kwargs: {
+                            ...(artifacts && artifacts.length > 0 && { artifacts }),
+                            ...(fileAnnotations && fileAnnotations.length > 0 && { fileAnnotations })
+                        }
+                    })
                 }
             ]
         }
@@ -754,59 +827,6 @@ class LLM_Agentflow implements INode {
         }
     }

-    /**
-     * Configures structured output for the LLM
-     */
-    private configureStructuredOutput(llmNodeInstance: BaseChatModel, llmStructuredOutput: IStructuredOutput[]): BaseChatModel {
-        try {
-            const zodObj: ICommonObject = {}
-            for (const sch of llmStructuredOutput) {
-                if (sch.type === 'string') {
-                    zodObj[sch.key] = z.string().describe(sch.description || '')
-                } else if (sch.type === 'stringArray') {
-                    zodObj[sch.key] = z.array(z.string()).describe(sch.description || '')
-                } else if (sch.type === 'number') {
-                    zodObj[sch.key] = z.number().describe(sch.description || '')
-                } else if (sch.type === 'boolean') {
-                    zodObj[sch.key] = z.boolean().describe(sch.description || '')
-                } else if (sch.type === 'enum') {
-                    const enumValues = sch.enumValues?.split(',').map((item: string) => item.trim()) || []
-                    zodObj[sch.key] = z
-                        .enum(enumValues.length ? (enumValues as [string, ...string[]]) : ['default'])
-                        .describe(sch.description || '')
-                } else if (sch.type === 'jsonArray') {
-                    const jsonSchema = sch.jsonSchema
-                    if (jsonSchema) {
-                        try {
-                            // Parse the JSON schema
-                            const schemaObj = JSON.parse(jsonSchema)
-
-                            // Create a Zod schema from the JSON schema
-                            const itemSchema = this.createZodSchemaFromJSON(schemaObj)
-
-                            // Create an array schema of the item schema
-                            zodObj[sch.key] = z.array(itemSchema).describe(sch.description || '')
-                        } catch (err) {
-                            console.error(`Error parsing JSON schema for ${sch.key}:`, err)
-                            // Fallback to generic array of records
-                            zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '')
-                        }
-                    } else {
-                        // If no schema provided, use generic array of records
-                        zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '')
-                    }
-                }
-            }
-            const structuredOutput = z.object(zodObj)
-
-            // @ts-ignore
-            return llmNodeInstance.withStructuredOutput(structuredOutput)
-        } catch (exception) {
-            console.error(exception)
-            return llmNodeInstance
-        }
-    }
-
     /**
      * Handles streaming response from the LLM
      */
@@ -823,16 +843,20 @@ class LLM_Agentflow implements INode {
             for await (const chunk of await llmNodeInstance.stream(messages, { signal: abortController?.signal })) {
                 if (sseStreamer) {
                     let content = ''
-                    if (Array.isArray(chunk.content) && chunk.content.length > 0) {
+
+                    if (typeof chunk === 'string') {
+                        content = chunk
+                    } else if (Array.isArray(chunk.content) && chunk.content.length > 0) {
                         const contents = chunk.content as MessageContentText[]
                         content = contents.map((item) => item.text).join('')
-                    } else {
+                    } else if (chunk.content) {
                         content = chunk.content.toString()
                     }
                     sseStreamer.streamTokenEvent(chatId, content)
                 }

-                response = response.concat(chunk)
+                const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
+                response = response.concat(messageChunk)
             }
         } catch (error) {
             console.error('Error during streaming:', error)
@@ -854,7 +878,9 @@ class LLM_Agentflow implements INode {
         startTime: number,
         endTime: number,
         timeDelta: number,
-        isStructuredOutput: boolean
+        isStructuredOutput: boolean,
+        artifacts: any[] = [],
+        fileAnnotations: any[] = []
     ): any {
         const output: any = {
             content: finalResponse,
@@ -873,6 +899,10 @@ class LLM_Agentflow implements INode {
             output.usageMetadata = response.usage_metadata
         }

+        if (response.response_metadata) {
+            output.responseMetadata = response.response_metadata
+        }
+
         if (isStructuredOutput && typeof response === 'object') {
             const structuredOutput = response as Record<string, any>
             for (const key in structuredOutput) {
@@ -882,6 +912,14 @@ class LLM_Agentflow implements INode {
             }
         }

+        if (artifacts && artifacts.length > 0) {
+            output.artifacts = flatten(artifacts)
+        }
+
+        if (fileAnnotations && fileAnnotations.length > 0) {
+            output.fileAnnotations = fileAnnotations
+        }
+
         return output
     }

@@ -892,7 +930,12 @@ class LLM_Agentflow implements INode {
         const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer

         if (response.tool_calls) {
-            sseStreamer.streamCalledToolsEvent(chatId, response.tool_calls)
+            const formattedToolCalls = response.tool_calls.map((toolCall: any) => ({
+                tool: toolCall.name || 'tool',
+                toolInput: toolCall.args,
+                toolOutput: ''
+            }))
+            sseStreamer.streamCalledToolsEvent(chatId, flatten(formattedToolCalls))
         }

         if (response.usage_metadata) {
@@ -901,107 +944,6 @@ class LLM_Agentflow implements INode {

         sseStreamer.streamEndEvent(chatId)
     }

-    /**
-     * Creates a Zod schema from a JSON schema object
-     * @param jsonSchema The JSON schema object
-     * @returns A Zod schema
-     */
-    private createZodSchemaFromJSON(jsonSchema: any): z.ZodTypeAny {
-        // If the schema is an object with properties, create an object schema
-        if (typeof jsonSchema === 'object' && jsonSchema !== null) {
-            const schemaObj: Record<string, z.ZodTypeAny> = {}
-
-            // Process each property in the schema
-            for (const [key, value] of Object.entries(jsonSchema)) {
-                if (value === null) {
-                    // Handle null values
-                    schemaObj[key] = z.null()
-                } else if (typeof value === 'object' && !Array.isArray(value)) {
-                    // Check if the property has a type definition
-                    if ('type' in value) {
-                        const type = value.type as string
-                        const description = ('description' in value ? (value.description as string) : '') || ''
-
-                        // Create the appropriate Zod type based on the type property
-                        if (type === 'string') {
-                            schemaObj[key] = z.string().describe(description)
-                        } else if (type === 'number') {
-                            schemaObj[key] = z.number().describe(description)
-                        } else if (type === 'boolean') {
-                            schemaObj[key] = z.boolean().describe(description)
-                        } else if (type === 'array') {
-                            // If it's an array type, check if items is defined
-                            if ('items' in value && value.items) {
-                                const itemSchema = this.createZodSchemaFromJSON(value.items)
-                                schemaObj[key] = z.array(itemSchema).describe(description)
-                            } else {
-                                // Default to array of any if items not specified
-                                schemaObj[key] = z.array(z.any()).describe(description)
-                            }
-                        } else if (type === 'object') {
-                            // If it's an object type, check if properties is defined
-                            if ('properties' in value && value.properties) {
-                                const nestedSchema = this.createZodSchemaFromJSON(value.properties)
-                                schemaObj[key] = nestedSchema.describe(description)
-                            } else {
-                                // Default to record of any if properties not specified
-                                schemaObj[key] = z.record(z.any()).describe(description)
-                            }
-                        } else {
-                            // Default to any for unknown types
-                            schemaObj[key] = z.any().describe(description)
-                        }
-
-                        // Check if the property is optional
-                        if ('optional' in value && value.optional === true) {
-                            schemaObj[key] = schemaObj[key].optional()
-                        }
-                    } else if (Array.isArray(value)) {
-                        // Array values without a type property
-                        if (value.length > 0) {
-                            // If the array has items, recursively create a schema for the first item
-                            const itemSchema = this.createZodSchemaFromJSON(value[0])
-                            schemaObj[key] = z.array(itemSchema)
-                        } else {
-                            // Empty array, allow any array
-                            schemaObj[key] = z.array(z.any())
-                        }
-                    } else {
-                        // It's a nested object without a type property, recursively create schema
-                        schemaObj[key] = this.createZodSchemaFromJSON(value)
-                    }
-                } else if (Array.isArray(value)) {
-                    // Array values
-                    if (value.length > 0) {
-                        // If the array has items, recursively create a schema for the first item
-                        const itemSchema = this.createZodSchemaFromJSON(value[0])
-                        schemaObj[key] = z.array(itemSchema)
-                    } else {
-                        // Empty array, allow any array
-                        schemaObj[key] = z.array(z.any())
-                    }
-                } else {
-                    // For primitive values (which shouldn't be in the schema directly)
-                    // Use the corresponding Zod type
-                    if (typeof value === 'string') {
-                        schemaObj[key] = z.string()
-                    } else if (typeof value === 'number') {
-                        schemaObj[key] = z.number()
-                    } else if (typeof value === 'boolean') {
-                        schemaObj[key] = z.boolean()
-                    } else {
-                        schemaObj[key] = z.any()
-                    }
-                }
-            }
-
-            return z.object(schemaObj)
-        }
-
-        // Fallback to any for unknown types
-        return z.any()
-    }
 }

 module.exports = { nodeClass: LLM_Agentflow }
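The two helpers removed here now live in src/utils (shared as configureStructuredOutput). At its core the wiring just binds a Zod schema to the chat model; a minimal sketch, with an illustrative model choice and schema fields (not the exact Flowise call sites):

import { z } from 'zod'
import { ChatOpenAI } from '@langchain/openai'

// Any BaseChatModel that supports withStructuredOutput works the same way
const model = new ChatOpenAI({ model: 'gpt-4o-mini' })
const schema = z.object({
    answer: z.string().describe('The final answer'),
    confidence: z.number().describe('Confidence between 0 and 1')
})
const structuredModel = model.withStructuredOutput(schema)
// structuredModel.invoke(...) now resolves to { answer, confidence }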
@@ -1,4 +1,5 @@
 import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
+import { updateFlowState } from '../utils'

 class Loop_Agentflow implements INode {
     label: string

@@ -19,7 +20,7 @@ class Loop_Agentflow implements INode {
     constructor() {
         this.label = 'Loop'
         this.name = 'loopAgentflow'
-        this.version = 1.0
+        this.version = 1.2
         this.type = 'Loop'
         this.category = 'Agent Flows'
         this.description = 'Loop back to a previous node'
@@ -40,6 +41,39 @@ class Loop_Agentflow implements INode {
                 name: 'maxLoopCount',
                 type: 'number',
                 default: 5
             },
+            {
+                label: 'Fallback Message',
+                name: 'fallbackMessage',
+                type: 'string',
+                description: 'Message to display if the loop count is exceeded',
+                placeholder: 'Enter your fallback message here',
+                rows: 4,
+                acceptVariable: true,
+                optional: true
+            },
+            {
+                label: 'Update Flow State',
+                name: 'loopUpdateState',
+                description: 'Update runtime state during the execution of the workflow',
+                type: 'array',
+                optional: true,
+                acceptVariable: true,
+                array: [
+                    {
+                        label: 'Key',
+                        name: 'key',
+                        type: 'asyncOptions',
+                        loadMethod: 'listRuntimeStateKeys'
+                    },
+                    {
+                        label: 'Value',
+                        name: 'value',
+                        type: 'string',
+                        acceptVariable: true,
+                        acceptNodeOutputAsVariable: true
+                    }
+                ]
+            }
         ]
     }
@@ -58,12 +92,20 @@ class Loop_Agentflow implements INode {
                 })
             }
             return returnOptions
         },
+        async listRuntimeStateKeys(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
+            const previousNodes = options.previousNodes as ICommonObject[]
+            const startAgentflowNode = previousNodes.find((node) => node.name === 'startAgentflow')
+            const state = startAgentflowNode?.inputs?.startState as ICommonObject[]
+            return state.map((item) => ({ label: item.key, name: item.key }))
+        }
     }

     async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
         const loopBackToNode = nodeData.inputs?.loopBackToNode as string
         const _maxLoopCount = nodeData.inputs?.maxLoopCount as string
+        const fallbackMessage = nodeData.inputs?.fallbackMessage as string
+        const _loopUpdateState = nodeData.inputs?.loopUpdateState

         const state = options.agentflowRuntime?.state as ICommonObject

@@ -75,16 +117,34 @@ class Loop_Agentflow implements INode {
             maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5
         }

+        const finalOutput = 'Loop back to ' + `${loopBackToNodeLabel} (${loopBackToNodeId})`
+
+        // Update flow state if needed
+        let newState = { ...state }
+        if (_loopUpdateState && Array.isArray(_loopUpdateState) && _loopUpdateState.length > 0) {
+            newState = updateFlowState(state, _loopUpdateState)
+        }
+
+        // Process template variables in state
+        if (newState && Object.keys(newState).length > 0) {
+            for (const key in newState) {
+                if (newState[key].toString().includes('{{ output }}')) {
+                    newState[key] = finalOutput
+                }
+            }
+        }
+
         const returnOutput = {
             id: nodeData.id,
             name: this.name,
             input: data,
             output: {
-                content: 'Loop back to ' + `${loopBackToNodeLabel} (${loopBackToNodeId})`,
+                content: finalOutput,
                 nodeID: loopBackToNodeId,
-                maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5
+                maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5,
+                fallbackMessage
             },
-            state
+            state: newState
        }

         return returnOutput
@@ -36,7 +36,7 @@ class Retriever_Agentflow implements INode {
     constructor() {
         this.label = 'Retriever'
         this.name = 'retrieverAgentflow'
-        this.version = 1.0
+        this.version = 1.1
         this.type = 'Retriever'
         this.category = 'Agent Flows'
         this.description = 'Retrieve information from vector database'

@@ -87,8 +87,7 @@ class Retriever_Agentflow implements INode {
                     label: 'Key',
                     name: 'key',
                     type: 'asyncOptions',
-                    loadMethod: 'listRuntimeStateKeys',
-                    freeSolo: true
+                    loadMethod: 'listRuntimeStateKeys'
                 },
                 {
                     label: 'Value',
@@ -29,7 +29,7 @@ class Tool_Agentflow implements INode {
     constructor() {
         this.label = 'Tool'
         this.name = 'toolAgentflow'
-        this.version = 1.1
+        this.version = 1.2
         this.type = 'Tool'
         this.category = 'Agent Flows'
         this.description = 'Tools allow LLM to interact with external systems'

@@ -80,8 +80,7 @@ class Tool_Agentflow implements INode {
                     label: 'Key',
                     name: 'key',
                     type: 'asyncOptions',
-                    loadMethod: 'listRuntimeStateKeys',
-                    freeSolo: true
+                    loadMethod: 'listRuntimeStateKeys'
                 },
                 {
                     label: 'Value',
@@ -1,10 +1,11 @@
-import { BaseMessage, MessageContentImageUrl } from '@langchain/core/messages'
+import { BaseMessage, MessageContentImageUrl, AIMessageChunk } from '@langchain/core/messages'
 import { getImageUploads } from '../../src/multiModalUtils'
-import { getFileFromStorage } from '../../src/storageUtils'
-import { ICommonObject, IFileUpload } from '../../src/Interface'
+import { addSingleFileToStorage, getFileFromStorage } from '../../src/storageUtils'
+import { ICommonObject, IFileUpload, INodeData } from '../../src/Interface'
 import { BaseMessageLike } from '@langchain/core/messages'
 import { IFlowState } from './Interface.Agentflow'
-import { handleEscapeCharacters, mapMimeTypeToInputField } from '../../src/utils'
+import { getCredentialData, getCredentialParam, handleEscapeCharacters, mapMimeTypeToInputField } from '../../src/utils'
+import fetch from 'node-fetch'

 export const addImagesToMessages = async (
     options: ICommonObject,
@@ -18,7 +19,8 @@ export const addImagesToMessages = async (
         for (const upload of imageUploads) {
             let bf = upload.data
             if (upload.type == 'stored-file') {
-                const contents = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId)
+                const fileName = upload.name.replace(/^FILE-STORAGE::/, '')
+                const contents = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)
                 // as the image is stored in the server, read the file and convert it to base64
                 bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64')

@@ -89,8 +91,9 @@ export const processMessagesWithImages = async (
                     if (item.type === 'stored-file' && item.name && item.mime.startsWith('image/')) {
                         hasImageReferences = true
                         try {
+                            const fileName = item.name.replace(/^FILE-STORAGE::/, '')
                             // Get file contents from storage
-                            const contents = await getFileFromStorage(item.name, options.orgId, options.chatflowid, options.chatId)
+                            const contents = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)

                             // Create base64 data URL
                             const base64Data = 'data:' + item.mime + ';base64,' + contents.toString('base64')
@@ -322,7 +325,8 @@ export const getPastChatHistoryImageMessages = async (
             const imageContents: MessageContentImageUrl[] = []
             for (const upload of uploads) {
                 if (upload.type === 'stored-file' && upload.mime.startsWith('image/')) {
-                    const fileData = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId)
+                    const fileName = upload.name.replace(/^FILE-STORAGE::/, '')
+                    const fileData = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)
                     // as the image is stored in the server, read the file and convert it to base64
                     const bf = 'data:' + upload.mime + ';base64,' + fileData.toString('base64')
@@ -456,6 +460,437 @@ export const getPastChatHistoryImageMessages = async (
         }
     }

+/**
+ * Gets MIME type from filename extension
+ */
+export const getMimeTypeFromFilename = (filename: string): string => {
+    const extension = filename.toLowerCase().split('.').pop()
+    const mimeTypes: { [key: string]: string } = {
+        png: 'image/png',
+        jpg: 'image/jpeg',
+        jpeg: 'image/jpeg',
+        gif: 'image/gif',
+        pdf: 'application/pdf',
+        txt: 'text/plain',
+        csv: 'text/csv',
+        json: 'application/json',
+        html: 'text/html',
+        xml: 'application/xml'
+    }
+    return mimeTypes[extension || ''] || 'application/octet-stream'
+}
+
+/**
+ * Gets artifact type from filename extension for UI rendering
+ */
+export const getArtifactTypeFromFilename = (filename: string): string => {
+    const extension = filename.toLowerCase().split('.').pop()
+    const artifactTypes: { [key: string]: string } = {
+        png: 'png',
+        jpg: 'jpeg',
+        jpeg: 'jpeg',
+        html: 'html',
+        htm: 'html',
+        md: 'markdown',
+        markdown: 'markdown',
+        json: 'json',
+        js: 'javascript',
+        javascript: 'javascript',
+        tex: 'latex',
+        latex: 'latex',
+        txt: 'text',
+        csv: 'text',
+        pdf: 'text'
+    }
+    return artifactTypes[extension || ''] || 'text'
+}
+
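A few illustrative calls showing how the two helpers above classify filenames:

// Illustrative usage of the helpers above
getMimeTypeFromFilename('report.pdf')     // 'application/pdf'
getMimeTypeFromFilename('archive.zip')    // 'application/octet-stream' (fallback)
getArtifactTypeFromFilename('chart.png')  // 'png' -> rendered as an image artifact
getArtifactTypeFromFilename('notes.md')   // 'markdown'
getArtifactTypeFromFilename('data.bin')   // 'text' (fallback)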
+/**
+ * Saves base64 image data to storage and returns file information
+ */
+export const saveBase64Image = async (
+    outputItem: any,
+    options: ICommonObject
+): Promise<{ filePath: string; fileName: string; totalSize: number } | null> => {
+    try {
+        if (!outputItem.result) {
+            return null
+        }
+
+        // Extract base64 data and create buffer
+        const base64Data = outputItem.result
+        const imageBuffer = Buffer.from(base64Data, 'base64')
+
+        // Determine file extension and MIME type
+        const outputFormat = outputItem.output_format || 'png'
+        const fileName = `generated_image_${outputItem.id || Date.now()}.${outputFormat}`
+        const mimeType = outputFormat === 'png' ? 'image/png' : 'image/jpeg'
+
+        // Save the image using the existing storage utility
+        const { path, totalSize } = await addSingleFileToStorage(
+            mimeType,
+            imageBuffer,
+            fileName,
+            options.orgId,
+            options.chatflowid,
+            options.chatId
+        )
+
+        return { filePath: path, fileName, totalSize }
+    } catch (error) {
+        console.error('Error saving base64 image:', error)
+        return null
+    }
+}
+
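A hedged usage sketch for the helper above; the outputItem shape mirrors an OpenAI image_generation_call output, and all field values here are illustrative:

// Sketch only: ids, base64 payload, and storage context are made up
const saved = await saveBase64Image(
    { id: 'img_123', result: '<base64 png data>', output_format: 'png' },
    { orgId: 'org-1', chatflowid: 'flow-1', chatId: 'chat-1' }
)
if (saved) {
    // e.g. { filePath: '.../generated_image_img_123.png', fileName: 'generated_image_img_123.png', totalSize: 12345 }
    console.log(saved.filePath)
}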
+/**
+ * Saves Gemini inline image data to storage and returns file information
+ */
+export const saveGeminiInlineImage = async (
+    inlineItem: any,
+    options: ICommonObject
+): Promise<{ filePath: string; fileName: string; totalSize: number } | null> => {
+    try {
+        if (!inlineItem.data || !inlineItem.mimeType) {
+            return null
+        }
+
+        // Extract base64 data and create buffer
+        const base64Data = inlineItem.data
+        const imageBuffer = Buffer.from(base64Data, 'base64')
+
+        // Determine file extension from MIME type
+        const mimeType = inlineItem.mimeType
+        let extension = 'png'
+        if (mimeType.includes('jpeg') || mimeType.includes('jpg')) {
+            extension = 'jpg'
+        } else if (mimeType.includes('png')) {
+            extension = 'png'
+        } else if (mimeType.includes('gif')) {
+            extension = 'gif'
+        } else if (mimeType.includes('webp')) {
+            extension = 'webp'
+        }
+
+        const fileName = `gemini_generated_image_${Date.now()}.${extension}`
+
+        // Save the image using the existing storage utility
+        const { path, totalSize } = await addSingleFileToStorage(
+            mimeType,
+            imageBuffer,
+            fileName,
+            options.orgId,
+            options.chatflowid,
+            options.chatId
+        )
+
+        return { filePath: path, fileName, totalSize }
+    } catch (error) {
+        console.error('Error saving Gemini inline image:', error)
+        return null
+    }
+}
+
+/**
+ * Downloads file content from container file citation
+ */
+export const downloadContainerFile = async (
+    containerId: string,
+    fileId: string,
+    filename: string,
+    modelNodeData: INodeData,
+    options: ICommonObject
+): Promise<{ filePath: string; totalSize: number } | null> => {
+    try {
+        const credentialData = await getCredentialData(modelNodeData.credential ?? '', options)
+        const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, modelNodeData)
+
+        if (!openAIApiKey) {
+            console.warn('No OpenAI API key available for downloading container file')
+            return null
+        }
+
+        // Download the file using OpenAI Container API
+        const response = await fetch(`https://api.openai.com/v1/containers/${containerId}/files/${fileId}/content`, {
+            method: 'GET',
+            headers: {
+                Accept: '*/*',
+                Authorization: `Bearer ${openAIApiKey}`
+            }
+        })
+
+        if (!response.ok) {
+            console.warn(
+                `Failed to download container file ${fileId} from container ${containerId}: ${response.status} ${response.statusText}`
+            )
+            return null
+        }
+
+        // Extract the binary data from the Response object
+        const data = await response.arrayBuffer()
+        const dataBuffer = Buffer.from(data)
+        const mimeType = getMimeTypeFromFilename(filename)
+
+        // Store the file using the same storage utility as OpenAIAssistant
+        const { path, totalSize } = await addSingleFileToStorage(
+            mimeType,
+            dataBuffer,
+            filename,
+            options.orgId,
+            options.chatflowid,
+            options.chatId
+        )
+
+        return { filePath: path, totalSize }
+    } catch (error) {
+        console.error('Error downloading container file:', error)
+        return null
+    }
+}
+
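The download above hits the OpenAI Containers API (GET /v1/containers/{container_id}/files/{file_id}/content) with the node's own OpenAI credential, then persists the bytes through the shared storage utility. An equivalent minimal fetch, as a sketch assuming containerId and fileId are already in scope and the key comes from the environment:

// Minimal sketch of the same download, outside the node context
import fetch from 'node-fetch'

const res = await fetch(`https://api.openai.com/v1/containers/${containerId}/files/${fileId}/content`, {
    headers: { Authorization: `Bearer ${process.env.OPENAI_API_KEY}` }
})
if (!res.ok) throw new Error(`Download failed: ${res.status}`)
const bytes = Buffer.from(await res.arrayBuffer()) // ready to hand to addSingleFileToStorage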
+/**
+ * Replace inlineData base64 with file references in the response content
+ */
+export const replaceInlineDataWithFileReferences = (
+    response: AIMessageChunk,
+    savedInlineImages: Array<{ filePath: string; fileName: string; mimeType: string }>
+): void => {
+    // Check if content is an array
+    if (!Array.isArray(response.content)) {
+        return
+    }
+
+    // Replace base64 data with file references in response content
+    let savedImageIndex = 0
+    for (let i = 0; i < response.content.length; i++) {
+        const contentItem = response.content[i]
+        if (
+            typeof contentItem === 'object' &&
+            contentItem.type === 'inlineData' &&
+            contentItem.inlineData &&
+            savedImageIndex < savedInlineImages.length
+        ) {
+            const savedImage = savedInlineImages[savedImageIndex]
+            // Replace with file reference
+            response.content[i] = {
+                type: 'stored-file',
+                name: savedImage.fileName,
+                mime: savedImage.mimeType,
+                path: savedImage.filePath
+            }
+            savedImageIndex++
+        }
+    }
+
+    // Clear the inlineData from response_metadata to avoid duplication
+    if (response.response_metadata?.inlineData) {
+        delete response.response_metadata.inlineData
+    }
+}
+
+/**
+ * Extracts artifacts from response metadata (both annotations and built-in tools)
+ */
+export const extractArtifactsFromResponse = async (
+    responseMetadata: any,
+    modelNodeData: INodeData,
+    options: ICommonObject
+): Promise<{
+    artifacts: any[]
+    fileAnnotations: any[]
+    savedInlineImages?: Array<{ filePath: string; fileName: string; mimeType: string }>
+}> => {
+    const artifacts: any[] = []
+    const fileAnnotations: any[] = []
+    const savedInlineImages: Array<{ filePath: string; fileName: string; mimeType: string }> = []
+
+    // Handle Gemini inline data (image generation)
+    if (responseMetadata?.inlineData && Array.isArray(responseMetadata.inlineData)) {
+        for (const inlineItem of responseMetadata.inlineData) {
+            if (inlineItem.type === 'gemini_inline_data' && inlineItem.data && inlineItem.mimeType) {
+                try {
+                    const savedImageResult = await saveGeminiInlineImage(inlineItem, options)
+                    if (savedImageResult) {
+                        // Create artifact in the same format as other image artifacts
+                        const fileType = getArtifactTypeFromFilename(savedImageResult.fileName)
+                        artifacts.push({
+                            type: fileType,
+                            data: savedImageResult.filePath
+                        })
+
+                        // Track saved image for replacing base64 data in content
+                        savedInlineImages.push({
+                            filePath: savedImageResult.filePath,
+                            fileName: savedImageResult.fileName,
+                            mimeType: inlineItem.mimeType
+                        })
+                    }
+                } catch (error) {
+                    console.error('Error processing Gemini inline image artifact:', error)
+                }
+            }
+        }
+    }
+
+    if (!responseMetadata?.output || !Array.isArray(responseMetadata.output)) {
+        return { artifacts, fileAnnotations, savedInlineImages: savedInlineImages.length > 0 ? savedInlineImages : undefined }
+    }
+
+    for (const outputItem of responseMetadata.output) {
+        // Handle container file citations from annotations
+        if (outputItem.type === 'message' && outputItem.content && Array.isArray(outputItem.content)) {
+            for (const contentItem of outputItem.content) {
+                if (contentItem.annotations && Array.isArray(contentItem.annotations)) {
+                    for (const annotation of contentItem.annotations) {
+                        if (annotation.type === 'container_file_citation' && annotation.file_id && annotation.filename) {
+                            try {
+                                // Download and store the file content
+                                const downloadResult = await downloadContainerFile(
+                                    annotation.container_id,
+                                    annotation.file_id,
+                                    annotation.filename,
+                                    modelNodeData,
+                                    options
+                                )
+
+                                if (downloadResult) {
+                                    const fileType = getArtifactTypeFromFilename(annotation.filename)
+
+                                    if (fileType === 'png' || fileType === 'jpeg' || fileType === 'jpg') {
+                                        const artifact = {
+                                            type: fileType,
+                                            data: downloadResult.filePath
+                                        }
+
+                                        artifacts.push(artifact)
+                                    } else {
+                                        fileAnnotations.push({
+                                            filePath: downloadResult.filePath,
+                                            fileName: annotation.filename
+                                        })
+                                    }
+                                }
+                            } catch (error) {
+                                console.error('Error processing annotation:', error)
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        // Handle built-in tool artifacts (like image generation)
+        if (outputItem.type === 'image_generation_call' && outputItem.result) {
+            try {
+                const savedImageResult = await saveBase64Image(outputItem, options)
+                if (savedImageResult) {
+                    // Replace the base64 result with the file path in the response metadata
+                    outputItem.result = savedImageResult.filePath
+
+                    // Create artifact in the same format as other image artifacts
+                    const fileType = getArtifactTypeFromFilename(savedImageResult.fileName)
+                    artifacts.push({
+                        type: fileType,
+                        data: savedImageResult.filePath
+                    })
+                }
+            } catch (error) {
+                console.error('Error processing image generation artifact:', error)
+            }
+        }
+    }
+
+    return { artifacts, fileAnnotations, savedInlineImages: savedInlineImages.length > 0 ? savedInlineImages : undefined }
+}
+
+/**
+ * Add image artifacts from previous assistant messages as user messages
+ * This allows the LLM to see and reference the generated images in the conversation
+ * Messages are marked with a special flag for later removal
+ */
+export const addImageArtifactsToMessages = async (messages: BaseMessageLike[], options: ICommonObject): Promise<void> => {
+    const imageExtensions = ['png', 'jpg', 'jpeg', 'gif', 'webp']
+    const messagesToInsert: Array<{ index: number; message: any }> = []
+
+    // Iterate through messages to find assistant messages with image artifacts
+    for (let i = 0; i < messages.length; i++) {
+        const message = messages[i] as any
+
+        // Check if this is an assistant message with artifacts
+        if (
+            (message.role === 'assistant' || message.role === 'ai') &&
+            message.additional_kwargs?.artifacts &&
+            Array.isArray(message.additional_kwargs.artifacts)
+        ) {
+            const artifacts = message.additional_kwargs.artifacts
+            const imageArtifacts: Array<{ type: string; name: string; mime: string }> = []
+
+            // Extract image artifacts
+            for (const artifact of artifacts) {
+                if (artifact.type && artifact.data) {
+                    // Check if this is an image artifact by file type
+                    if (imageExtensions.includes(artifact.type.toLowerCase())) {
+                        // Extract filename from the file path
+                        const fileName = artifact.data.split('/').pop() || artifact.data
+                        const mimeType = `image/${artifact.type.toLowerCase()}`
+
+                        imageArtifacts.push({
+                            type: 'stored-file',
+                            name: fileName,
+                            mime: mimeType
+                        })
+                    }
+                }
+            }
+
+            // If we found image artifacts, prepare to insert a user message after this assistant message
+            if (imageArtifacts.length > 0) {
+                // Check if the next message already contains these image artifacts to avoid duplicates
+                const nextMessage = messages[i + 1] as any
+                const shouldInsert =
+                    !nextMessage ||
+                    nextMessage.role !== 'user' ||
+                    !Array.isArray(nextMessage.content) ||
+                    !nextMessage.content.some(
+                        (item: any) =>
+                            (item.type === 'stored-file' || item.type === 'image_url') &&
+                            imageArtifacts.some((artifact) => {
+                                // Compare with and without FILE-STORAGE:: prefix
+                                const artifactName = artifact.name.replace('FILE-STORAGE::', '')
+                                const itemName = item.name?.replace('FILE-STORAGE::', '') || ''
+                                return artifactName === itemName
+                            })
+                    )
+
+                if (shouldInsert) {
+                    messagesToInsert.push({
+                        index: i + 1,
+                        message: {
+                            role: 'user',
+                            content: imageArtifacts,
+                            _isTemporaryImageMessage: true // Mark for later removal
+                        }
+                    })
+                }
+            }
+        }
+    }
+
+    // Insert messages in reverse order to maintain correct indices
+    for (let i = messagesToInsert.length - 1; i >= 0; i--) {
+        const { index, message } = messagesToInsert[i]
+        messages.splice(index, 0, message)
+    }
+
+    // Convert stored-file references to base64 image_url format
+    if (messagesToInsert.length > 0) {
+        const { updatedMessages } = await processMessagesWithImages(messages, options)
+        // Replace the messages array content with the updated messages
+        messages.length = 0
+        messages.push(...updatedMessages)
+    }
+}
+
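In the LLM node above, this helper runs just before the model is invoked; a hedged sketch of the round trip, with illustrative message and options shapes:

// Sketch: an assistant turn that previously produced an image artifact
const messages: any[] = [
    { role: 'user', content: 'Draw a cat' },
    {
        role: 'assistant',
        content: 'Here is your cat',
        additional_kwargs: { artifacts: [{ type: 'png', data: 'FILE-STORAGE::cat.png' }] }
    },
    { role: 'user', content: 'Make it orange' }
]

// Inserts a temporary user message carrying the stored image (converted to base64)
// after the assistant turn, so the model can see what it previously generated
await addImageArtifactsToMessages(messages, { orgId: 'org-1', chatflowid: 'flow-1', chatId: 'chat-1' })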
 /**
  * Updates the flow state with new values
  */
@@ -183,7 +183,7 @@ json.dumps(my_dict)`
                 // TODO: get print console output
                 finalResult = await pyodide.runPythonAsync(code)
             } catch (error) {
-                throw new Error(`Sorry, I'm unable to find answer for question: "${input}" using follwoing code: "${pythonCode}"`)
+                throw new Error(`Sorry, I'm unable to find answer for question: "${input}" using following code: "${pythonCode}"`)
             }
         }

@@ -5,7 +5,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
 import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
-import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon, convertChatHistoryToText, convertBaseMessagetoIMessage } from '../../../src/utils'
 import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
 import {
     FlowiseMemory,
@@ -23,8 +23,10 @@ import { Moderation, checkInputs, streamResponse } from '../../moderation/Modera
 import { formatResponse } from '../../outputparsers/OutputParserHelpers'
 import type { Document } from '@langchain/core/documents'
 import { BaseRetriever } from '@langchain/core/retrievers'
-import { RESPONSE_TEMPLATE } from '../../chains/ConversationalRetrievalQAChain/prompts'
+import { RESPONSE_TEMPLATE, REPHRASE_TEMPLATE } from '../../chains/ConversationalRetrievalQAChain/prompts'
 import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils'
+import { StringOutputParser } from '@langchain/core/output_parsers'
+import { Tool } from '@langchain/core/tools'

 class ConversationalRetrievalToolAgent_Agents implements INode {
     label: string
@@ -42,7 +44,7 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
     constructor(fields?: { sessionId?: string }) {
         this.label = 'Conversational Retrieval Tool Agent'
         this.name = 'conversationalRetrievalToolAgent'
-        this.author = 'niztal(falkor)'
+        this.author = 'niztal(falkor) and nikitas-novatix'
         this.version = 1.0
         this.type = 'AgentExecutor'
         this.category = 'Agents'
@@ -79,6 +81,26 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
                 optional: true,
                 default: RESPONSE_TEMPLATE
             },
+            {
+                label: 'Rephrase Prompt',
+                name: 'rephrasePrompt',
+                type: 'string',
+                description: 'Using previous chat history, rephrase question into a standalone question',
+                warning: 'Prompt must include input variables: {chat_history} and {question}',
+                rows: 4,
+                additionalParams: true,
+                optional: true,
+                default: REPHRASE_TEMPLATE
+            },
+            {
+                label: 'Rephrase Model',
+                name: 'rephraseModel',
+                type: 'BaseChatModel',
+                description:
+                    'Optional: Use a different (faster/cheaper) model for rephrasing. If not specified, uses the main Tool Calling Chat Model.',
+                optional: true,
+                additionalParams: true
+            },
             {
                 label: 'Input Moderation',
                 description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',
@@ -103,8 +125,9 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
         this.sessionId = fields?.sessionId
     }

-    async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
-        return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
+    // The agent will be prepared in run() with the correct user message - it needs the actual runtime input for rephrasing
+    async init(_nodeData: INodeData, _input: string, _options: ICommonObject): Promise<any> {
+        return null
     }

     async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
@@ -148,6 +171,23 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
                 sseStreamer.streamUsedToolsEvent(chatId, res.usedTools)
                 usedTools = res.usedTools
             }

+            // If the tool is set to returnDirect, stream the output to the client
+            if (res.usedTools && res.usedTools.length) {
+                let inputTools = nodeData.inputs?.tools
+                inputTools = flatten(inputTools)
+                for (const tool of res.usedTools) {
+                    const inputTool = inputTools.find((inputTool: Tool) => inputTool.name === tool.tool)
+                    if (inputTool && (inputTool as any).returnDirect && shouldStreamResponse) {
+                        sseStreamer.streamTokenEvent(chatId, tool.toolOutput)
+                        // Prevent CustomChainHandler from streaming the same output again
+                        if (res.output === tool.toolOutput) {
+                            res.output = ''
+                        }
+                    }
+                }
+            }
             // The CustomChainHandler will send the stream end event
         } else {
             res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
             if (res.sourceDocuments) {
@@ -210,9 +250,11 @@ const prepareAgent = async (
     flowObj: { sessionId?: string; chatId?: string; input?: string }
 ) => {
     const model = nodeData.inputs?.model as BaseChatModel
+    const rephraseModel = (nodeData.inputs?.rephraseModel as BaseChatModel) || model // Use main model if not specified
     const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
     let systemMessage = nodeData.inputs?.systemMessage as string
+    let rephrasePrompt = nodeData.inputs?.rephrasePrompt as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
@@ -220,6 +262,9 @@ const prepareAgent = async (
     const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever

     systemMessage = transformBracesWithColon(systemMessage)
+    if (rephrasePrompt) {
+        rephrasePrompt = transformBracesWithColon(rephrasePrompt)
+    }

     const prompt = ChatPromptTemplate.fromMessages([
         ['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],
@@ -263,6 +308,37 @@ const prepareAgent = async (

     const modelWithTools = model.bindTools(tools)

+    // Function to get standalone question (either rephrased or original)
+    const getStandaloneQuestion = async (input: string): Promise<string> => {
+        // If no rephrase prompt, return the original input
+        if (!rephrasePrompt) {
+            return input
+        }
+
+        // Get chat history (use empty string if none)
+        const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[]
+        const iMessages = convertBaseMessagetoIMessage(messages)
+        const chatHistoryString = convertChatHistoryToText(iMessages)
+
+        // Always rephrase to normalize/expand user queries for better retrieval
+        try {
+            const CONDENSE_QUESTION_PROMPT = PromptTemplate.fromTemplate(rephrasePrompt)
+            const condenseQuestionChain = RunnableSequence.from([CONDENSE_QUESTION_PROMPT, rephraseModel, new StringOutputParser()])
+            const res = await condenseQuestionChain.invoke({
+                question: input,
+                chat_history: chatHistoryString
+            })
+            return res
+        } catch (error) {
+            console.error('Error rephrasing question:', error)
+            // On error, fall back to original input
+            return input
+        }
+    }
+
+    // Get standalone question before creating runnable
+    const standaloneQuestion = await getStandaloneQuestion(flowObj?.input || '')
+
     const runnableAgent = RunnableSequence.from([
         {
             [inputKey]: (i: { input: string; steps: ToolsAgentStep[] }) => i.input,
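REPHRASE_TEMPLATE, imported from the ConversationalRetrievalQAChain prompts, must expose the {chat_history} and {question} variables the chain above fills in. Its general shape is the classic condense-question prompt; an illustrative stand-in (the shipped template may differ):

// Illustrative condense-question prompt; the real REPHRASE_TEMPLATE may differ
const EXAMPLE_REPHRASE_TEMPLATE = `Given the following conversation and a follow up question,
rephrase the follow up question to be a standalone question.

Chat History:
{chat_history}
Follow Up Input: {question}
Standalone Question:`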
@@ -272,7 +348,9 @@ const prepareAgent = async (
             return messages ?? []
         },
         context: async (i: { input: string; chatHistory?: string }) => {
-            const relevantDocs = await vectorStoreRetriever.invoke(i.input)
+            // Use the standalone question (rephrased or original) for retrieval
+            const retrievalQuery = standaloneQuestion || i.input
+            const relevantDocs = await vectorStoreRetriever.invoke(retrievalQuery)
             const formattedDocs = formatDocs(relevantDocs)
             return formattedDocs
         }
@@ -295,4 +373,6 @@ const prepareAgent = async (
     return executor
 }

-module.exports = { nodeClass: ConversationalRetrievalToolAgent_Agents }
+module.exports = {
+    nodeClass: ConversationalRetrievalToolAgent_Agents
+}
@@ -578,7 +578,7 @@ class OpenAIAssistant_Agents implements INode {
                                         toolOutput
                                     })
                                 } catch (e) {
-                                    await analyticHandlers.onToolEnd(toolIds, e)
+                                    await analyticHandlers.onToolError(toolIds, e)
                                     console.error('Error executing tool', e)
                                     throw new Error(
                                         `Error executing tool. Tool: ${tool.name}. Thread ID: ${threadId}. Run ID: ${runThreadId}`

@@ -703,7 +703,7 @@ class OpenAIAssistant_Agents implements INode {
                                         toolOutput
                                     })
                                 } catch (e) {
-                                    await analyticHandlers.onToolEnd(toolIds, e)
+                                    await analyticHandlers.onToolError(toolIds, e)
                                     console.error('Error executing tool', e)
                                     clearInterval(timeout)
                                     reject(

@@ -1096,7 +1096,7 @@ async function handleToolSubmission(params: ToolSubmissionParams): Promise<ToolS
                 toolOutput
             })
         } catch (e) {
-            await analyticHandlers.onToolEnd(toolIds, e)
+            await analyticHandlers.onToolError(toolIds, e)
             console.error('Error executing tool', e)
             throw new Error(`Error executing tool. Tool: ${tool.name}. Thread ID: ${threadId}. Run ID: ${runThreadId}`)
         }
@@ -91,7 +91,7 @@ class ChatAnthropic_ChatModels implements INode {
                 label: 'Extended Thinking',
                 name: 'extendedThinking',
                 type: 'boolean',
-                description: 'Enable extended thinking for reasoning model such as Claude Sonnet 3.7',
+                description: 'Enable extended thinking for reasoning model such as Claude Sonnet 3.7 and Claude 4',
                 optional: true,
                 additionalParams: true
             },
@@ -174,6 +174,18 @@ class GoogleGenerativeAI_ChatModels implements INode {
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'Thinking Budget',
+                name: 'thinkingBudget',
+                type: 'number',
+                description: 'Guides the number of thinking tokens. -1 for dynamic, 0 to disable, or positive integer (Gemini 2.5 models).',
+                step: 1,
+                optional: true,
+                additionalParams: true,
+                show: {
+                    modelName: ['gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.5-flash-lite']
+                }
+            },
             {
                 label: 'Base URL',
                 name: 'baseUrl',
@@ -216,6 +228,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
         const cache = nodeData.inputs?.cache as BaseCache
         const streaming = nodeData.inputs?.streaming as boolean
         const baseUrl = nodeData.inputs?.baseUrl as string | undefined
+        const thinkingBudget = nodeData.inputs?.thinkingBudget as string

         const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean

@@ -235,6 +248,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
         if (cache) obj.cache = cache
         if (temperature) obj.temperature = parseFloat(temperature)
         if (baseUrl) obj.baseUrl = baseUrl
+        if (thinkingBudget) obj.thinkingBudget = parseInt(thinkingBudget, 10)

         let safetySettings: SafetySetting[] = []
         if (_safetySettings) {
@@ -174,6 +174,9 @@ export interface GoogleGenerativeAIChatInput extends BaseChatModelParams, Pick<G
      * - Gemini 1.0 Pro version gemini-1.0-pro-002
      */
     convertSystemMessageToHumanContent?: boolean | undefined

+    /** Thinking budget for Gemini 2.5 thinking models. Supports -1 (dynamic), 0 (off), or positive integers. */
+    thinkingBudget?: number
 }

 /**
@@ -599,10 +602,17 @@ export class LangchainChatGoogleGenerativeAI

     convertSystemMessageToHumanContent: boolean | undefined

+    thinkingBudget?: number
+
     private client: GenerativeModel

     get _isMultimodalModel() {
-        return this.model.includes('vision') || this.model.startsWith('gemini-1.5') || this.model.startsWith('gemini-2')
+        return (
+            this.model.includes('vision') ||
+            this.model.startsWith('gemini-1.5') ||
+            this.model.startsWith('gemini-2') ||
+            this.model.startsWith('gemini-3')
+        )
     }

     constructor(fields: GoogleGenerativeAIChatInput) {
@@ -657,6 +667,7 @@ export class LangchainChatGoogleGenerativeAI

         this.streaming = fields.streaming ?? this.streaming
         this.json = fields.json
+        this.thinkingBudget = fields.thinkingBudget

         this.client = new GenerativeAI(this.apiKey).getGenerativeModel(
             {
@@ -676,12 +687,22 @@ export class LangchainChatGoogleGenerativeAI
                 baseUrl: fields.baseUrl
             }
         )
+        if (this.thinkingBudget !== undefined) {
+            ;(this.client.generationConfig as any).thinkingConfig = {
+                ...(this.thinkingBudget !== undefined ? { thinkingBudget: this.thinkingBudget } : {})
+            }
+        }
         this.streamUsage = fields.streamUsage ?? this.streamUsage
     }

     useCachedContent(cachedContent: CachedContent, modelParams?: ModelParams, requestOptions?: RequestOptions): void {
         if (!this.apiKey) return
         this.client = new GenerativeAI(this.apiKey).getGenerativeModelFromCachedContent(cachedContent, modelParams, requestOptions)
+        if (this.thinkingBudget !== undefined) {
+            ;(this.client.generationConfig as any).thinkingConfig = {
+                ...(this.thinkingBudget !== undefined ? { thinkingBudget: this.thinkingBudget } : {})
+            }
+        }
     }

     get useSystemInstruction(): boolean {
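For reference, the patched generationConfig maps onto the Gemini generateContent request roughly as below; per the node's own description, -1 requests dynamic thinking, 0 disables it, and a positive value caps the thinking-token budget. A sketch of the resulting request shape (not the SDK's typed config):

// Sketch of the resulting generationConfig in a generateContent request
const generationConfig = {
    temperature: 0.7,        // illustrative
    thinkingConfig: {
        thinkingBudget: 1024 // -1 = dynamic, 0 = off, N > 0 = token budget
    }
}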
@@ -770,6 +791,12 @@ export class LangchainChatGoogleGenerativeAI
             this.client.systemInstruction = systemInstruction
             actualPrompt = prompt.slice(1)
         }

+        // Ensure actualPrompt is never empty
+        if (actualPrompt.length === 0) {
+            actualPrompt = [{ role: 'user', parts: [{ text: '...' }] }]
+        }
+
         const parameters = this.invocationParams(options)

         // Handle streaming

@@ -834,6 +861,12 @@ export class LangchainChatGoogleGenerativeAI
             this.client.systemInstruction = systemInstruction
             actualPrompt = prompt.slice(1)
         }

+        // Ensure actualPrompt is never empty
+        if (actualPrompt.length === 0) {
+            actualPrompt = [{ role: 'user', parts: [{ text: '...' }] }]
+        }
+
         const parameters = this.invocationParams(options)
         const request = {
             ...parameters,
@@ -48,6 +48,8 @@ export function getMessageAuthor(message: BaseMessage) {
 }

 /**
+ * !!! IMPORTANT: Must return 'user' as default instead of throwing error
+ * https://github.com/FlowiseAI/Flowise/issues/4743
  * Maps a message type to a Google Generative AI chat author.
  * @param message The message to map.
  * @param model The model to use for mapping.
@@ -450,6 +452,7 @@ export function mapGenerateContentResultToChatResult(
     const [candidate] = response.candidates
     const { content: candidateContent, ...generationInfo } = candidate
     let content: MessageContent | undefined
+    const inlineDataItems: any[] = []

     if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length === 1 && candidateContent.parts[0].text) {
         content = candidateContent.parts[0].text
@@ -470,6 +473,18 @@ export function mapGenerateContentResultToChatResult(
                     type: 'codeExecutionResult',
                     codeExecutionResult: p.codeExecutionResult
                 }
+            } else if ('inlineData' in p && p.inlineData) {
+                // Extract inline image data for processing by Agent
+                inlineDataItems.push({
+                    type: 'gemini_inline_data',
+                    mimeType: p.inlineData.mimeType,
+                    data: p.inlineData.data
+                })
+                // Return the inline data as part of the content structure
+                return {
+                    type: 'inlineData',
+                    inlineData: p.inlineData
+                }
             }
             return p
         })
@@ -486,6 +501,12 @@ export function mapGenerateContentResultToChatResult(
         text = block?.text ?? text
     }

+    // Build response_metadata with inline data if present
+    const response_metadata: any = {}
+    if (inlineDataItems.length > 0) {
+        response_metadata.inlineData = inlineDataItems
+    }
+
     const generation: ChatGeneration = {
         text,
         message: new AIMessage({
@@ -500,7 +521,8 @@ export function mapGenerateContentResultToChatResult(
             additional_kwargs: {
                 ...generationInfo
             },
-            usage_metadata: extra?.usageMetadata
+            usage_metadata: extra?.usageMetadata,
+            response_metadata: Object.keys(response_metadata).length > 0 ? response_metadata : undefined
         }),
         generationInfo
     }
@@ -531,6 +553,8 @@ export function convertResponseContentToChatGenerationChunk(
     const [candidate] = response.candidates
     const { content: candidateContent, ...generationInfo } = candidate
     let content: MessageContent | undefined
+    const inlineDataItems: any[] = []

     // Checks if some parts do not have text. If false, it means that the content is a string.
     if (Array.isArray(candidateContent?.parts) && candidateContent.parts.every((p) => 'text' in p)) {
         content = candidateContent.parts.map((p) => p.text).join('')
@@ -551,6 +575,18 @@ export function convertResponseContentToChatGenerationChunk(
                     type: 'codeExecutionResult',
                     codeExecutionResult: p.codeExecutionResult
                 }
+            } else if ('inlineData' in p && p.inlineData) {
+                // Extract inline image data for processing by Agent
+                inlineDataItems.push({
+                    type: 'gemini_inline_data',
+                    mimeType: p.inlineData.mimeType,
+                    data: p.inlineData.data
+                })
+                // Return the inline data as part of the content structure
+                return {
+                    type: 'inlineData',
+                    inlineData: p.inlineData
+                }
             }
             return p
         })
@@ -580,6 +616,12 @@ export function convertResponseContentToChatGenerationChunk(
         )
     }

+    // Build response_metadata with inline data if present
+    const response_metadata: any = {}
+    if (inlineDataItems.length > 0) {
+        response_metadata.inlineData = inlineDataItems
+    }
+
     return new ChatGenerationChunk({
         text,
         message: new AIMessageChunk({
@ -589,7 +631,8 @@ export function convertResponseContentToChatGenerationChunk(
|
|||
// Each chunk can have unique "generationInfo", and merging strategy is unclear,
|
||||
// so leave blank for now.
|
||||
additional_kwargs: {},
|
||||
usage_metadata: extra.usageMetadata
|
||||
usage_metadata: extra.usageMetadata,
|
||||
response_metadata: Object.keys(response_metadata).length > 0 ? response_metadata : undefined
|
||||
}),
|
||||
generationInfo
|
||||
})
|
||||
|
|
|
|||
|
|
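The hunks above thread Gemini inline image parts through to `response_metadata.inlineData` on the returned message. A minimal sketch of how a downstream consumer might read that metadata; the helper name is ours, not part of the diff:

    import { AIMessage } from '@langchain/core/messages'

    // Hypothetical helper: collects the inline images that the changed mappers
    // attach under response_metadata.inlineData
    function extractInlineImages(message: AIMessage): { mimeType: string; data: string }[] {
        const items = ((message.response_metadata as any)?.inlineData ?? []) as any[]
        return items
            .filter((item) => item.type === 'gemini_inline_data')
            .map((item) => ({ mimeType: item.mimeType, data: item.data }))
    }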
@@ -41,15 +41,17 @@ class ChatHuggingFace_ChatModels implements INode {
                 label: 'Model',
                 name: 'model',
                 type: 'string',
-                description: 'If using own inference endpoint, leave this blank',
-                placeholder: 'gpt2'
+                description:
+                    'Model name (e.g., deepseek-ai/DeepSeek-V3.2-Exp:novita). If model includes provider (:) or using router endpoint, leave Endpoint blank.',
+                placeholder: 'deepseek-ai/DeepSeek-V3.2-Exp:novita'
             },
             {
                 label: 'Endpoint',
                 name: 'endpoint',
                 type: 'string',
                 placeholder: 'https://xyz.eu-west-1.aws.endpoints.huggingface.cloud/gpt2',
-                description: 'Using your own inference endpoint',
+                description:
+                    'Custom inference endpoint (optional). Not needed for models with providers (:) or router endpoints. Leave blank to use Inference Providers.',
                 optional: true
             },
             {

@@ -103,7 +105,7 @@ class ChatHuggingFace_ChatModels implements INode {
                 type: 'string',
                 rows: 4,
                 placeholder: 'AI assistant:',
-                description: 'Sets the stop sequences to use. Use comma to seperate different sequences.',
+                description: 'Sets the stop sequences to use. Use comma to separate different sequences.',
                 optional: true,
                 additionalParams: true
             }

@@ -124,6 +126,15 @@ class ChatHuggingFace_ChatModels implements INode {
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const huggingFaceApiKey = getCredentialParam('huggingFaceApiKey', credentialData, nodeData)
 
+        if (!huggingFaceApiKey) {
+            console.error('[ChatHuggingFace] API key validation failed: No API key found')
+            throw new Error('HuggingFace API key is required. Please configure it in the credential settings.')
+        }
+
+        if (!huggingFaceApiKey.startsWith('hf_')) {
+            console.warn('[ChatHuggingFace] API key format warning: Key does not start with "hf_"')
+        }
+
         const obj: Partial<HFInput> = {
             model,
             apiKey: huggingFaceApiKey
@@ -56,9 +56,9 @@ export class HuggingFaceInference extends LLM implements HFInput {
         this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY')
         this.endpointUrl = fields?.endpointUrl
         this.includeCredentials = fields?.includeCredentials
-        if (!this.apiKey) {
+        if (!this.apiKey || this.apiKey.trim() === '') {
             throw new Error(
-                'Please set an API key for HuggingFace Hub in the environment variable HUGGINGFACEHUB_API_KEY or in the apiKey field of the HuggingFaceInference constructor.'
+                'Please set an API key for HuggingFace Hub. Either configure it in the credential settings in the UI, or set the environment variable HUGGINGFACEHUB_API_KEY.'
             )
         }
     }

@@ -68,19 +68,21 @@ export class HuggingFaceInference extends LLM implements HFInput {
     }
 
     invocationParams(options?: this['ParsedCallOptions']) {
-        return {
-            model: this.model,
-            parameters: {
-                // make it behave similar to openai, returning only the generated text
-                return_full_text: false,
-                temperature: this.temperature,
-                max_new_tokens: this.maxTokens,
-                stop: options?.stop ?? this.stopSequences,
-                top_p: this.topP,
-                top_k: this.topK,
-                repetition_penalty: this.frequencyPenalty
-            }
-        }
+        // Return parameters compatible with chatCompletion API (OpenAI-compatible format)
+        const params: any = {
+            temperature: this.temperature,
+            max_tokens: this.maxTokens,
+            stop: options?.stop ?? this.stopSequences,
+            top_p: this.topP
+        }
+        // Include optional parameters if they are defined
+        if (this.topK !== undefined) {
+            params.top_k = this.topK
+        }
+        if (this.frequencyPenalty !== undefined) {
+            params.frequency_penalty = this.frequencyPenalty
+        }
+        return params
     }
 
     async *_streamResponseChunks(

@@ -88,51 +90,109 @@ export class HuggingFaceInference extends LLM implements HFInput {
         options: this['ParsedCallOptions'],
         runManager?: CallbackManagerForLLMRun
     ): AsyncGenerator<GenerationChunk> {
-        const hfi = await this._prepareHFInference()
-        const stream = await this.caller.call(async () =>
-            hfi.textGenerationStream({
-                ...this.invocationParams(options),
-                inputs: prompt
-            })
-        )
-        for await (const chunk of stream) {
-            const token = chunk.token.text
-            yield new GenerationChunk({ text: token, generationInfo: chunk })
-            await runManager?.handleLLMNewToken(token ?? '')
-
-            // stream is done
-            if (chunk.generated_text)
-                yield new GenerationChunk({
-                    text: '',
-                    generationInfo: { finished: true }
-                })
+        try {
+            const client = await this._prepareHFInference()
+            const stream = await this.caller.call(async () =>
+                client.chatCompletionStream({
+                    model: this.model,
+                    messages: [{ role: 'user', content: prompt }],
+                    ...this.invocationParams(options)
+                })
+            )
+            for await (const chunk of stream) {
+                const token = chunk.choices[0]?.delta?.content || ''
+                if (token) {
+                    yield new GenerationChunk({ text: token, generationInfo: chunk })
+                    await runManager?.handleLLMNewToken(token)
+                }
+                // stream is done when finish_reason is set
+                if (chunk.choices[0]?.finish_reason) {
+                    yield new GenerationChunk({
+                        text: '',
+                        generationInfo: { finished: true }
+                    })
+                    break
+                }
+            }
+        } catch (error: any) {
+            console.error('[ChatHuggingFace] Error in _streamResponseChunks:', error)
+            // Provide more helpful error messages
+            if (error?.message?.includes('endpointUrl') || error?.message?.includes('third-party provider')) {
+                throw new Error(
+                    `Cannot use custom endpoint with model "${this.model}" that includes a provider. Please leave the Endpoint field blank in the UI. Original error: ${error.message}`
+                )
+            }
+            throw error
+        }
     }
 
     /** @ignore */
     async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
-        const hfi = await this._prepareHFInference()
-        const args = { ...this.invocationParams(options), inputs: prompt }
-        const res = await this.caller.callWithOptions({ signal: options.signal }, hfi.textGeneration.bind(hfi), args)
-        return res.generated_text
+        try {
+            const client = await this._prepareHFInference()
+            // Use chatCompletion for chat models (v4 supports conversational models via Inference Providers)
+            const args = {
+                model: this.model,
+                messages: [{ role: 'user', content: prompt }],
+                ...this.invocationParams(options)
+            }
+            const res = await this.caller.callWithOptions({ signal: options.signal }, client.chatCompletion.bind(client), args)
+            const content = res.choices[0]?.message?.content || ''
+            if (!content) {
+                console.error('[ChatHuggingFace] No content in response:', JSON.stringify(res))
+                throw new Error(`No content received from HuggingFace API. Response: ${JSON.stringify(res)}`)
+            }
+            return content
+        } catch (error: any) {
+            console.error('[ChatHuggingFace] Error in _call:', error.message)
+            // Provide more helpful error messages
+            if (error?.message?.includes('endpointUrl') || error?.message?.includes('third-party provider')) {
+                throw new Error(
+                    `Cannot use custom endpoint with model "${this.model}" that includes a provider. Please leave the Endpoint field blank in the UI. Original error: ${error.message}`
+                )
+            }
+            if (error?.message?.includes('Invalid username or password') || error?.message?.includes('authentication')) {
+                throw new Error(
+                    `HuggingFace API authentication failed. Please verify your API key is correct and starts with "hf_". Original error: ${error.message}`
+                )
+            }
+            throw error
+        }
     }
 
     /** @ignore */
     private async _prepareHFInference() {
-        const { HfInference } = await HuggingFaceInference.imports()
-        const hfi = new HfInference(this.apiKey, {
-            includeCredentials: this.includeCredentials
-        })
-        return this.endpointUrl ? hfi.endpoint(this.endpointUrl) : hfi
+        if (!this.apiKey || this.apiKey.trim() === '') {
+            console.error('[ChatHuggingFace] API key validation failed: Empty or undefined')
+            throw new Error('HuggingFace API key is required. Please configure it in the credential settings.')
+        }
+
+        const { InferenceClient } = await HuggingFaceInference.imports()
+        // Use InferenceClient for chat models (works better with Inference Providers)
+        const client = new InferenceClient(this.apiKey)
+
+        // Don't override endpoint if model uses a provider (contains ':') or if endpoint is router-based
+        // When using Inference Providers, endpoint should be left blank - InferenceClient handles routing automatically
+        if (
+            this.endpointUrl &&
+            !this.model.includes(':') &&
+            !this.endpointUrl.includes('/v1/chat/completions') &&
+            !this.endpointUrl.includes('router.huggingface.co')
+        ) {
+            return client.endpoint(this.endpointUrl)
+        }
+
+        // Return client without endpoint override - InferenceClient will use Inference Providers automatically
+        return client
     }
 
     /** @ignore */
     static async imports(): Promise<{
-        HfInference: typeof import('@huggingface/inference').HfInference
+        InferenceClient: typeof import('@huggingface/inference').InferenceClient
     }> {
         try {
-            const { HfInference } = await import('@huggingface/inference')
-            return { HfInference }
+            const { InferenceClient } = await import('@huggingface/inference')
+            return { InferenceClient }
         } catch (e) {
             throw new Error('Please install huggingface as a dependency with, e.g. `pnpm install @huggingface/inference`')
         }
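For orientation, this is roughly the @huggingface/inference v4 call pattern the rewritten `_call` and `_streamResponseChunks` delegate to. A sketch only; the provider-suffixed model id is illustrative:

    import { InferenceClient } from '@huggingface/inference'

    async function demo() {
        const client = new InferenceClient(process.env.HUGGINGFACEHUB_API_KEY)
        // A ':provider' suffix routes the call through Inference Providers,
        // so no custom endpoint is configured
        const res = await client.chatCompletion({
            model: 'deepseek-ai/DeepSeek-V3.2-Exp:novita',
            messages: [{ role: 'user', content: 'Hello!' }],
            max_tokens: 128
        })
        console.log(res.choices[0]?.message?.content)
    }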
@@ -1,7 +1,8 @@
-import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
 import { BaseCache } from '@langchain/core/caches'
-import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+import { ChatOpenRouter } from './FlowiseChatOpenRouter'
 
 class ChatOpenRouter_ChatModels implements INode {
     label: string

@@ -23,7 +24,7 @@ class ChatOpenRouter_ChatModels implements INode {
         this.icon = 'openRouter.svg'
         this.category = 'Chat Models'
         this.description = 'Wrapper around Open Router Inference API'
-        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
+        this.baseClasses = [this.type, ...getBaseClasses(LangchainChatOpenAI)]
         this.credential = {
             label: 'Connect Credential',
             name: 'credential',

@@ -114,6 +115,40 @@ class ChatOpenRouter_ChatModels implements INode {
                 type: 'json',
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'Allow Image Uploads',
+                name: 'allowImageUploads',
+                type: 'boolean',
+                description:
+                    'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
+                default: false,
+                optional: true
+            },
+            {
+                label: 'Image Resolution',
+                description: 'This parameter controls the resolution in which the model views the image.',
+                name: 'imageResolution',
+                type: 'options',
+                options: [
+                    {
+                        label: 'Low',
+                        name: 'low'
+                    },
+                    {
+                        label: 'High',
+                        name: 'high'
+                    },
+                    {
+                        label: 'Auto',
+                        name: 'auto'
+                    }
+                ],
+                default: 'low',
+                optional: false,
+                show: {
+                    allowImageUploads: true
+                }
+            }
         ]
     }

@@ -130,6 +165,8 @@ class ChatOpenRouter_ChatModels implements INode {
         const basePath = (nodeData.inputs?.basepath as string) || 'https://openrouter.ai/api/v1'
         const baseOptions = nodeData.inputs?.baseOptions
         const cache = nodeData.inputs?.cache as BaseCache
+        const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
+        const imageResolution = nodeData.inputs?.imageResolution as string
 
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const openRouterApiKey = getCredentialParam('openRouterApiKey', credentialData, nodeData)

@@ -155,7 +192,7 @@ class ChatOpenRouter_ChatModels implements INode {
             try {
                 parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
             } catch (exception) {
-                throw new Error("Invalid JSON in the ChatCerebras's BaseOptions: " + exception)
+                throw new Error("Invalid JSON in the ChatOpenRouter's BaseOptions: " + exception)
             }
         }

@@ -166,7 +203,15 @@ class ChatOpenRouter_ChatModels implements INode {
             }
         }
 
-        const model = new ChatOpenAI(obj)
+        const multiModalOption: IMultiModalOption = {
+            image: {
+                allowImageUploads: allowImageUploads ?? false,
+                imageResolution
+            }
+        }
+
+        const model = new ChatOpenRouter(nodeData.id, obj)
+        model.setMultiModalOption(multiModalOption)
         return model
     }
 }
@@ -0,0 +1,29 @@
+import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { IMultiModalOption, IVisionChatModal } from '../../../src'
+
+export class ChatOpenRouter extends LangchainChatOpenAI implements IVisionChatModal {
+    configuredModel: string
+    configuredMaxToken?: number
+    multiModalOption: IMultiModalOption
+    id: string
+
+    constructor(id: string, fields?: ChatOpenAIFields) {
+        super(fields)
+        this.id = id
+        this.configuredModel = fields?.modelName ?? ''
+        this.configuredMaxToken = fields?.maxTokens
+    }
+
+    revertToOriginalModel(): void {
+        this.model = this.configuredModel
+        this.maxTokens = this.configuredMaxToken
+    }
+
+    setMultiModalOption(multiModalOption: IMultiModalOption): void {
+        this.multiModalOption = multiModalOption
+    }
+
+    setVisionModel(): void {
+        // pass - OpenRouter models don't need model switching
+    }
+}
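A rough sketch of how the node's `init` wires this class up, mirroring the hunks above; the field values are placeholders, not defaults from the codebase:

    const model = new ChatOpenRouter('node-id', {
        modelName: 'openai/gpt-4o-mini', // illustrative model id
        openAIApiKey: 'sk-or-...',
        configuration: { baseURL: 'https://openrouter.ai/api/v1' }
    })
    model.setMultiModalOption({
        image: { allowImageUploads: true, imageResolution: 'low' }
    })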
@@ -95,7 +95,7 @@ class API_DocumentLoaders implements INode {
             type: 'string',
             rows: 4,
             description:
-                'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, seperated by comma. Use * to omit all metadata keys execept the ones you specify in the Additional Metadata field',
+                'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, separated by comma. Use * to omit all metadata keys except the ones you specify in the Additional Metadata field',
             placeholder: 'key1, key2, key3.nestedKey1',
             optional: true,
             additionalParams: true
@@ -1,5 +1,12 @@
-<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M4.72492 9.35559L6.5 24L15 5.5L6.33616 7.15025C5.30261 7.34712 4.59832 8.3111 4.72492 9.35559Z" fill="#97D700" stroke="#97D700" stroke-width="2" stroke-linejoin="round"/>
-<path d="M26.6204 20.5943L26.5699 20.6161L19.5 4.5L24.0986 4.14626C25.163 4.06438 26.1041 4.83296 26.2365 5.8923L27.8137 18.5094C27.9241 19.3925 27.4377 20.2422 26.6204 20.5943Z" fill="#71C5E8" stroke="#71C5E8" stroke-width="2" stroke-linejoin="round"/>
-<path d="M17.5 10L9.5 28L23 22L17.5 10Z" fill="#FF9114" stroke="#FF9114" stroke-width="2" stroke-linejoin="round"/>
+<svg width="200" height="200" viewBox="0 0 200 200" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g clip-path="url(#clip0_267_4154)">
+<path d="M114.695 0H196.97C198.643 0 200 1.35671 200 3.03031V128.766C200 131.778 196.083 132.945 194.434 130.425L112.159 4.68953C110.841 2.67412 112.287 0 114.695 0Z" fill="#246DFF"/>
+<path d="M85.3048 0H3.0303C1.35671 0 0 1.35671 0 3.03031V128.766C0 131.778 3.91698 132.945 5.566 130.425L87.8405 4.68953C89.1593 2.67412 87.7134 0 85.3048 0Z" fill="#20A34E"/>
+<path d="M98.5909 100.668L5.12683 194.835C3.22886 196.747 4.58334 200 7.27759 200H192.8C195.483 200 196.842 196.77 194.967 194.852L102.908 100.685C101.726 99.4749 99.7824 99.4676 98.5909 100.668Z" fill="#F86606"/>
+</g>
+<defs>
+<clipPath id="clip0_267_4154">
+<rect width="200" height="200" fill="white"/>
+</clipPath>
+</defs>
 </svg>

Before Width: 32px | Height: 32px | Size: 653 B. After Width: 200px | Height: 200px | Size: 827 B.
@@ -2,7 +2,7 @@ import { TextLoader } from 'langchain/document_loaders/fs/text'
 import Papa from 'papaparse'
 
 type CSVLoaderOptions = {
-    // Return specifific column from key (string) or index (integer)
+    // Return specific column from key (string) or index (integer)
     column?: string | number
     // Force separator (default: auto detect)
     separator?: string
@@ -119,8 +119,7 @@ class CustomDocumentLoader_DocumentLoaders implements INode {
 
         try {
             const response = await executeJavaScriptCode(javascriptFunction, sandbox, {
-                libraries: ['axios'],
-                timeout: 10000
+                libraries: ['axios']
             })
 
             if (output === 'document' && Array.isArray(response)) {
@@ -47,7 +47,7 @@ class Json_DocumentLoaders implements INode {
     constructor() {
         this.label = 'Json File'
         this.name = 'jsonFile'
-        this.version = 3.0
+        this.version = 3.1
         this.type = 'Document'
         this.icon = 'json.svg'
         this.category = 'Document Loaders'

@@ -66,6 +66,14 @@ class Json_DocumentLoaders implements INode {
                 type: 'TextSplitter',
                 optional: true
             },
+            {
+                label: 'Separate by JSON Object (JSON Array)',
+                name: 'separateByObject',
+                type: 'boolean',
+                description: 'If enabled and the file is a JSON Array, each JSON object will be extracted as a chunk',
+                optional: true,
+                additionalParams: true
+            },
             {
                 label: 'Pointers Extraction (separated by commas)',
                 name: 'pointersName',

@@ -73,7 +81,10 @@ class Json_DocumentLoaders implements INode {
                 description:
                     'Ex: { "key": "value" }, Pointer Extraction = "key", "value" will be extracted as pageContent of the chunk. Use comma to separate multiple pointers',
                 placeholder: 'key1, key2',
-                optional: true
+                optional: true,
+                hide: {
+                    separateByObject: true
+                }
             },
             {
                 label: 'Additional Metadata',

@@ -122,6 +133,7 @@ class Json_DocumentLoaders implements INode {
         const pointersName = nodeData.inputs?.pointersName as string
         const metadata = nodeData.inputs?.metadata
         const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string
+        const separateByObject = nodeData.inputs?.separateByObject as boolean
         const output = nodeData.outputs?.output as string
 
         let omitMetadataKeys: string[] = []

@@ -153,7 +165,7 @@ class Json_DocumentLoaders implements INode {
                 if (!file) continue
                 const fileData = await getFileFromStorage(file, orgId, chatflowid)
                 const blob = new Blob([fileData])
-                const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata)
+                const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata, separateByObject)
 
                 if (textSplitter) {
                     let splittedDocs = await loader.load()

@@ -176,7 +188,7 @@ class Json_DocumentLoaders implements INode {
                 splitDataURI.pop()
                 const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
                 const blob = new Blob([bf])
-                const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata)
+                const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata, separateByObject)
 
                 if (textSplitter) {
                     let splittedDocs = await loader.load()

@@ -306,13 +318,20 @@ class TextLoader extends BaseDocumentLoader {
 class JSONLoader extends TextLoader {
     public pointers: string[]
     private metadataMapping: Record<string, string>
+    private separateByObject: boolean
 
-    constructor(filePathOrBlob: string | Blob, pointers: string | string[] = [], metadataMapping: Record<string, string> = {}) {
+    constructor(
+        filePathOrBlob: string | Blob,
+        pointers: string | string[] = [],
+        metadataMapping: Record<string, string> = {},
+        separateByObject: boolean = false
+    ) {
         super(filePathOrBlob)
         this.pointers = Array.isArray(pointers) ? pointers : [pointers]
         if (metadataMapping) {
             this.metadataMapping = typeof metadataMapping === 'object' ? metadataMapping : JSON.parse(metadataMapping)
         }
+        this.separateByObject = separateByObject
     }
 
     protected async parse(raw: string): Promise<Document[]> {

@@ -323,14 +342,24 @@ class JSONLoader extends TextLoader {
         const jsonArray = Array.isArray(json) ? json : [json]
 
         for (const item of jsonArray) {
-            const content = this.extractContent(item)
-            const metadata = this.extractMetadata(item)
-
-            for (const pageContent of content) {
-                documents.push({
-                    pageContent,
-                    metadata
-                })
+            if (this.separateByObject) {
+                if (typeof item === 'object' && item !== null && !Array.isArray(item)) {
+                    const metadata = this.extractMetadata(item)
+                    const pageContent = this.formatObjectAsKeyValue(item)
+                    documents.push({
+                        pageContent,
+                        metadata
+                    })
+                }
+            } else {
+                const content = this.extractContent(item)
+                const metadata = this.extractMetadata(item)
+                for (const pageContent of content) {
+                    documents.push({
+                        pageContent,
+                        metadata
+                    })
+                }
             }
         }

@@ -370,6 +399,30 @@ class JSONLoader extends TextLoader {
         return metadata
     }
 
+    /**
+     * Formats a JSON object as readable key-value pairs
+     */
+    private formatObjectAsKeyValue(obj: any, prefix: string = ''): string {
+        const lines: string[] = []
+
+        for (const [key, value] of Object.entries(obj)) {
+            const fullKey = prefix ? `${prefix}.${key}` : key
+
+            if (value === null || value === undefined) {
+                lines.push(`${fullKey}: ${value}`)
+            } else if (Array.isArray(value)) {
+                lines.push(`${fullKey}: ${JSON.stringify(value)}`)
+            } else if (typeof value === 'object') {
+                // Recursively format nested objects
+                lines.push(this.formatObjectAsKeyValue(value, fullKey))
+            } else {
+                lines.push(`${fullKey}: ${value}`)
+            }
+        }
+
+        return lines.join('\n')
+    }
+
     /**
      * If JSON pointers are specified, return all strings below any of them
      * and exclude all other nodes expect if they match a JSON pointer.
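To illustrate the new mode with sample data (not taken from the diff): when `separateByObject` is enabled, each object in a JSON array becomes one chunk, flattened by `formatObjectAsKeyValue`. `JSONLoader` is file-local, so the sketch below is illustrative only:

    // File contents: [{"name":"Ada","address":{"city":"London"}},{"name":"Grace"}]
    const blob = new Blob(['[{"name":"Ada","address":{"city":"London"}},{"name":"Grace"}]'])
    const loader = new JSONLoader(blob, undefined, {}, true)
    const docs = await loader.load()
    // docs[0].pageContent:
    //   name: Ada
    //   address.city: London
    // docs[1].pageContent:
    //   name: Grace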
@@ -100,7 +100,7 @@ export class OxylabsLoader extends BaseDocumentLoader {
         const params = {
             source: this.params.source,
             geo_location: this.params.geo_location,
-            render: this.params.render,
+            render: this.params.render ? 'html' : null,
             parse: this.params.parse,
             user_agent_type: this.params.user_agent_type,
             markdown: !this.params.parse,

@@ -110,11 +110,14 @@ export class OxylabsLoader extends BaseDocumentLoader {
 
         const response = await this.sendAPIRequest<OxylabsResponse>(params)
 
-        const docs: OxylabsDocument[] = response.data.results.map((result, index) => ({
-            id: `${response.data.job.id.toString()}-${index}`,
-            pageContent: result.content,
-            metadata: {}
-        }))
+        const docs: OxylabsDocument[] = response.data.results.map((result, index) => {
+            const content = typeof result.content === 'string' ? result.content : JSON.stringify(result.content)
+            return {
+                id: `${response.data.job.id.toString()}-${index}`,
+                pageContent: content,
+                metadata: {}
+            }
+        })
 
         return docs
     }
@@ -190,11 +190,14 @@ class Playwright_DocumentLoaders implements INode {
         async function playwrightLoader(url: string): Promise<Document[] | undefined> {
             try {
                 let docs = []
 
+                const executablePath = process.env.PLAYWRIGHT_EXECUTABLE_PATH
+
                 const config: PlaywrightWebBaseLoaderOptions = {
                     launchOptions: {
                         args: ['--no-sandbox'],
                         headless: true,
-                        executablePath: process.env.PLAYWRIGHT_EXECUTABLE_FILE_PATH
+                        executablePath: executablePath
                     }
                 }
                 if (waitUntilGoToOption) {
@@ -181,11 +181,14 @@ class Puppeteer_DocumentLoaders implements INode {
         async function puppeteerLoader(url: string): Promise<Document[] | undefined> {
             try {
                 let docs: Document[] = []
 
+                const executablePath = process.env.PUPPETEER_EXECUTABLE_PATH
+
                 const config: PuppeteerWebBaseLoaderOptions = {
                     launchOptions: {
                         args: ['--no-sandbox'],
                         headless: 'new',
-                        executablePath: process.env.PUPPETEER_EXECUTABLE_FILE_PATH
+                        executablePath: executablePath
                     }
                 }
                 if (waitUntilGoToOption) {
@@ -27,8 +27,6 @@ type Element = {
 }
 
 export class UnstructuredLoader extends BaseDocumentLoader {
-    public filePath: string
-
     private apiUrl = process.env.UNSTRUCTURED_API_URL || 'https://api.unstructuredapp.io/general/v0/general'
 
     private apiKey: string | undefined = process.env.UNSTRUCTURED_API_KEY

@@ -138,7 +136,7 @@ export class UnstructuredLoader extends BaseDocumentLoader {
         })
 
         if (!response.ok) {
-            throw new Error(`Failed to partition file ${this.filePath} with error ${response.status} and message ${await response.text()}`)
+            throw new Error(`Failed to partition file with error ${response.status} and message ${await response.text()}`)
         }
 
         const elements = await response.json()
@@ -4,15 +4,11 @@ import {
     UnstructuredLoaderOptions,
     UnstructuredLoaderStrategy,
     SkipInferTableTypes,
-    HiResModelName,
-    UnstructuredLoader as LCUnstructuredLoader
+    HiResModelName
 } from '@langchain/community/document_loaders/fs/unstructured'
 import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src/utils'
 import { getFileFromStorage, INodeOutputsValue } from '../../../src'
 import { UnstructuredLoader } from './Unstructured'
-import { isPathTraversal } from '../../../src/validator'
-import sanitize from 'sanitize-filename'
-import path from 'path'
 
 class UnstructuredFile_DocumentLoaders implements INode {
     label: string

@@ -44,17 +40,6 @@ class UnstructuredFile_DocumentLoaders implements INode {
             optional: true
         }
         this.inputs = [
-            /** Deprecated
-            {
-                label: 'File Path',
-                name: 'filePath',
-                type: 'string',
-                placeholder: '',
-                optional: true,
-                warning:
-                    'Use the File Upload instead of File path. If file is uploaded, this path is ignored. Path will be deprecated in future releases.'
-            },
-            */
             {
                 label: 'Files Upload',
                 name: 'fileObject',

@@ -455,7 +440,6 @@ class UnstructuredFile_DocumentLoaders implements INode {
     }
 
     async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
-        const filePath = nodeData.inputs?.filePath as string
         const unstructuredAPIUrl = nodeData.inputs?.unstructuredAPIUrl as string
         const strategy = nodeData.inputs?.strategy as UnstructuredLoaderStrategy
         const encoding = nodeData.inputs?.encoding as string

@@ -560,37 +544,8 @@ class UnstructuredFile_DocumentLoaders implements INode {
                     docs.push(...loaderDocs)
                 }
             }
-        } else if (filePath) {
-            if (!filePath || typeof filePath !== 'string') {
-                throw new Error('Invalid file path format')
-            }
-
-            if (isPathTraversal(filePath)) {
-                throw new Error('Invalid path characters detected in filePath - path traversal not allowed')
-            }
-
-            const parsedPath = path.parse(filePath)
-            const sanitizedFilename = sanitize(parsedPath.base)
-
-            if (!sanitizedFilename || sanitizedFilename.trim() === '') {
-                throw new Error('Invalid filename after sanitization')
-            }
-
-            const sanitizedFilePath = path.join(parsedPath.dir, sanitizedFilename)
-
-            if (!path.isAbsolute(sanitizedFilePath)) {
-                throw new Error('File path must be absolute')
-            }
-
-            if (sanitizedFilePath.includes('..')) {
-                throw new Error('Invalid file path - directory traversal not allowed')
-            }
-
-            const loader = new LCUnstructuredLoader(sanitizedFilePath, obj)
-            const loaderDocs = await loader.load()
-            docs.push(...loaderDocs)
         } else {
-            throw new Error('File path or File upload is required')
+            throw new Error('File upload is required')
         }
 
         if (metadata) {
@@ -1,3 +1,6 @@
+/*
+* Uncomment this if you want to use the UnstructuredFolder to load a folder from the file system
+
 import { omit } from 'lodash'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
 import {

@@ -516,3 +519,4 @@ class UnstructuredFolder_DocumentLoaders implements INode {
 }
 
 module.exports = { nodeClass: UnstructuredFolder_DocumentLoaders }
+*/
@@ -96,7 +96,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
             {
                 label: 'Max AWS API retries',
                 name: 'maxRetries',
-                description: 'This will limit the nubmer of AWS API for Titan model embeddings call retries. Used to avoid throttling.',
+                description: 'This will limit the number of AWS API for Titan model embeddings call retries. Used to avoid throttling.',
                 type: 'number',
                 optional: true,
                 default: 5,
@@ -23,24 +23,22 @@ export class HuggingFaceInferenceEmbeddings extends Embeddings implements Huggin
         this.model = fields?.model ?? 'sentence-transformers/distilbert-base-nli-mean-tokens'
         this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY')
         this.endpoint = fields?.endpoint ?? ''
-        this.client = new HfInference(this.apiKey)
-        if (this.endpoint) this.client.endpoint(this.endpoint)
+        const hf = new HfInference(this.apiKey)
+        // v4 uses Inference Providers by default; only override if custom endpoint provided
+        this.client = this.endpoint ? hf.endpoint(this.endpoint) : hf
     }
 
     async _embed(texts: string[]): Promise<number[][]> {
         // replace newlines, which can negatively affect performance.
         const clean = texts.map((text) => text.replace(/\n/g, ' '))
-        const hf = new HfInference(this.apiKey)
         const obj: any = {
             inputs: clean
         }
-        if (this.endpoint) {
-            hf.endpoint(this.endpoint)
-        } else {
+        if (!this.endpoint) {
            obj.model = this.model
         }
 
-        const res = await this.caller.callWithOptions({}, hf.featureExtraction.bind(hf), obj)
+        const res = await this.caller.callWithOptions({}, this.client.featureExtraction.bind(this.client), obj)
         return res as number[][]
     }
@@ -39,7 +39,7 @@ class SubQuestionQueryEngine_LlamaIndex implements INode {
         this.icon = 'subQueryEngine.svg'
         this.category = 'Engine'
         this.description =
-            'Breaks complex query into sub questions for each relevant data source, then gather all the intermediate reponses and synthesizes a final response'
+            'Breaks complex query into sub questions for each relevant data source, then gather all the intermediate responses and synthesizes a final response'
         this.baseClasses = [this.type, 'BaseQueryEngine']
         this.tags = ['LlamaIndex']
         this.inputs = [
@@ -78,6 +78,8 @@ export class HuggingFaceInference extends LLM implements HFInput {
     async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
         const { HfInference } = await HuggingFaceInference.imports()
         const hf = new HfInference(this.apiKey)
+        // v4 uses Inference Providers by default; only override if custom endpoint provided
+        const hfClient = this.endpoint ? hf.endpoint(this.endpoint) : hf
         const obj: any = {
             parameters: {
                 // make it behave similar to openai, returning only the generated text

@@ -90,12 +92,10 @@ export class HuggingFaceInference extends LLM implements HFInput {
             },
             inputs: prompt
         }
-        if (this.endpoint) {
-            hf.endpoint(this.endpoint)
-        } else {
+        if (!this.endpoint) {
             obj.model = this.model
         }
-        const res = await this.caller.callWithOptions({ signal: options.signal }, hf.textGeneration.bind(hf), obj)
+        const res = await this.caller.callWithOptions({ signal: options.signal }, hfClient.textGeneration.bind(hfClient), obj)
         return res.generated_text
     }
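The recurring HuggingFace fix here (and in the embeddings hunk above) is that `hf.endpoint(url)` does not mutate the client; it returns a new endpoint-bound client that must itself be used for the call. A minimal sketch of the corrected pattern:

    import { HfInference } from '@huggingface/inference'

    async function generate(apiKey: string, endpointUrl?: string) {
        const hf = new HfInference(apiKey)
        // endpoint() returns a bound client; calling it only for side effects does nothing
        const client = endpointUrl ? hf.endpoint(endpointUrl) : hf
        const res = await client.textGeneration({ inputs: 'Hello', parameters: { max_new_tokens: 32 } })
        return res.generated_text
    }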
@@ -21,6 +21,7 @@ import { ChatOpenAI } from '../../chatmodels/ChatOpenAI/FlowiseChatOpenAI'
 import { ChatAnthropic } from '../../chatmodels/ChatAnthropic/FlowiseChatAnthropic'
 import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils'
 import { ChatGoogleGenerativeAI } from '../../chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI'
+import { AzureChatOpenAI } from '../../chatmodels/AzureChatOpenAI/FlowiseAzureChatOpenAI'
 
 const sysPrompt = `You are a supervisor tasked with managing a conversation between the following workers: {team_members}.
 Given the following user request, respond with the worker to act next.

@@ -242,7 +243,7 @@ class Supervisor_MultiAgents implements INode {
                     }
                 }
             })
-        } else if (llm instanceof ChatOpenAI) {
+        } else if (llm instanceof ChatOpenAI || llm instanceof AzureChatOpenAI) {
             let prompt = ChatPromptTemplate.fromMessages([
                 ['system', systemPrompt],
                 new MessagesPlaceholder('messages'),
@@ -11,7 +11,7 @@ return [
     tool_calls: [
       {
         id: "12345",
-        name: "calulator",
+        name: "calculator",
        args: {
           number1: 333382,
           number2: 1932,

@@ -130,8 +130,7 @@ class ChatPromptTemplate_Prompts implements INode {
 
         try {
             const response = await executeJavaScriptCode(messageHistoryCode, sandbox, {
-                libraries: ['axios', '@langchain/core'],
-                timeout: 10000
+                libraries: ['axios', '@langchain/core']
             })
 
             const parsedResponse = JSON.parse(response)
@@ -62,7 +62,6 @@ class MySQLRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
                 description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },

@@ -205,8 +204,8 @@ class MySQLRecordManager implements RecordManagerInterface {
     }
 
     async createSchema(): Promise<void> {
-        const dataSource = await this.getDataSource()
         try {
+            const dataSource = await this.getDataSource()
             const queryRunner = dataSource.createQueryRunner()
             const tableName = this.sanitizeTableName(this.tableName)
 

@@ -219,7 +218,16 @@ class MySQLRecordManager implements RecordManagerInterface {
                 unique key \`unique_key_namespace\` (\`key\`,
                 \`namespace\`));`)
 
-            const columns = [`updated_at`, `key`, `namespace`, `group_id`]
+            // Add doc_id column if it doesn't exist (migration for existing tables)
+            const checkColumn = await queryRunner.manager.query(
+                `SELECT COUNT(1) ColumnExists FROM INFORMATION_SCHEMA.COLUMNS
+                WHERE table_schema=DATABASE() AND table_name='${tableName}' AND column_name='doc_id';`
+            )
+            if (checkColumn[0].ColumnExists === 0) {
+                await queryRunner.manager.query(`ALTER TABLE \`${tableName}\` ADD COLUMN \`doc_id\` longtext;`)
+            }
+
+            const columns = [`updated_at`, `key`, `namespace`, `group_id`, `doc_id`]
             for (const column of columns) {
                 // MySQL does not support 'IF NOT EXISTS' function for Index
                 const Check = await queryRunner.manager.query(

@@ -241,6 +249,8 @@ class MySQLRecordManager implements RecordManagerInterface {
                 return
             }
             throw e
+        } finally {
+            await dataSource.destroy()
         }
     }
 

@@ -259,7 +269,7 @@ class MySQLRecordManager implements RecordManagerInterface {
         }
     }
 
-    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }

@@ -275,23 +285,23 @@ class MySQLRecordManager implements RecordManagerInterface {
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }
 
-        const groupIds = _groupIds ?? keys.map(() => null)
+        // Handle both new format (objects with uid and docId) and old format (strings)
+        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
+        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
+        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)
+
+        const groupIds = _groupIds ?? keyStrings.map(() => null)
 
-        if (groupIds.length !== keys.length) {
-            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`)
+        if (groupIds.length !== keyStrings.length) {
+            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids (${groupIds.length})`)
         }
 
-        const recordsToUpsert = keys.map((key, i) => [
-            key,
-            this.namespace,
-            updatedAt,
-            groupIds[i] ?? null // Ensure groupIds[i] is null if undefined
-        ])
+        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i] ?? null, docIds[i] ?? null])
 
         const query = `
-            INSERT INTO \`${tableName}\` (\`key\`, \`namespace\`, \`updated_at\`, \`group_id\`)
-            VALUES (?, ?, ?, ?)
-            ON DUPLICATE KEY UPDATE \`updated_at\` = VALUES(\`updated_at\`)`
+            INSERT INTO \`${tableName}\` (\`key\`, \`namespace\`, \`updated_at\`, \`group_id\`, \`doc_id\`)
+            VALUES (?, ?, ?, ?, ?)
+            ON DUPLICATE KEY UPDATE \`updated_at\` = VALUES(\`updated_at\`), \`doc_id\` = VALUES(\`doc_id\`)`
 
         // To handle multiple files upsert
         try {

@@ -347,13 +357,13 @@ class MySQLRecordManager implements RecordManagerInterface {
         }
     }
 
-    async listKeys(options?: ListKeyOptions): Promise<string[]> {
+    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
         const dataSource = await this.getDataSource()
         const queryRunner = dataSource.createQueryRunner()
         const tableName = this.sanitizeTableName(this.tableName)
 
         try {
-            const { before, after, limit, groupIds } = options ?? {}
+            const { before, after, limit, groupIds, docId } = options ?? {}
             let query = `SELECT \`key\` FROM \`${tableName}\` WHERE \`namespace\` = ?`
             const values: (string | number | string[])[] = [this.namespace]
 

@@ -380,6 +390,11 @@ class MySQLRecordManager implements RecordManagerInterface {
                 values.push(...groupIds.filter((gid): gid is string => gid !== null))
             }
 
+            if (docId) {
+                query += ` AND \`doc_id\` = ?`
+                values.push(docId)
+            }
+
             query += ';'
 
             // Directly using try/catch with async/await for cleaner flow
@@ -78,7 +78,6 @@ class PostgresRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
                 description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },

@@ -222,8 +221,8 @@ class PostgresRecordManager implements RecordManagerInterface {
     }
 
     async createSchema(): Promise<void> {
-        const dataSource = await this.getDataSource()
         try {
+            const dataSource = await this.getDataSource()
             const queryRunner = dataSource.createQueryRunner()
             const tableName = this.sanitizeTableName(this.tableName)
 

@@ -241,6 +240,19 @@ class PostgresRecordManager implements RecordManagerInterface {
                 CREATE INDEX IF NOT EXISTS namespace_index ON "${tableName}" (namespace);
                 CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
 
+            // Add doc_id column if it doesn't exist (migration for existing tables)
+            await queryRunner.manager.query(`
+                DO $$
+                BEGIN
+                    IF NOT EXISTS (
+                        SELECT 1 FROM information_schema.columns
+                        WHERE table_name = '${tableName}' AND column_name = 'doc_id'
+                    ) THEN
+                        ALTER TABLE "${tableName}" ADD COLUMN doc_id TEXT;
+                        CREATE INDEX IF NOT EXISTS doc_id_index ON "${tableName}" (doc_id);
+                    END IF;
+                END $$;`)
+
             await queryRunner.release()
         } catch (e: any) {
             // This error indicates that the table already exists

@@ -251,6 +263,8 @@ class PostgresRecordManager implements RecordManagerInterface {
                 return
             }
             throw e
+        } finally {
+            await dataSource.destroy()
         }
     }
 

@@ -284,7 +298,7 @@ class PostgresRecordManager implements RecordManagerInterface {
         return `(${placeholders.join(', ')})`
     }
 
-    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }

@@ -300,17 +314,22 @@ class PostgresRecordManager implements RecordManagerInterface {
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }
 
-        const groupIds = _groupIds ?? keys.map(() => null)
+        // Handle both new format (objects with uid and docId) and old format (strings)
+        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
+        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
+        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)
+
+        const groupIds = _groupIds ?? keyStrings.map(() => null)
 
-        if (groupIds.length !== keys.length) {
-            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids ${groupIds.length})`)
+        if (groupIds.length !== keyStrings.length) {
+            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids ${groupIds.length})`)
         }
 
-        const recordsToUpsert = keys.map((key, i) => [key, this.namespace, updatedAt, groupIds[i]])
+        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i], docIds[i]])
 
         const valuesPlaceholders = recordsToUpsert.map((_, j) => this.generatePlaceholderForRowAt(j, recordsToUpsert[0].length)).join(', ')
 
-        const query = `INSERT INTO "${tableName}" (key, namespace, updated_at, group_id) VALUES ${valuesPlaceholders} ON CONFLICT (key, namespace) DO UPDATE SET updated_at = EXCLUDED.updated_at;`
+        const query = `INSERT INTO "${tableName}" (key, namespace, updated_at, group_id, doc_id) VALUES ${valuesPlaceholders} ON CONFLICT (key, namespace) DO UPDATE SET updated_at = EXCLUDED.updated_at, doc_id = EXCLUDED.doc_id;`
         try {
             await queryRunner.manager.query(query, recordsToUpsert.flat())
             await queryRunner.release()

@@ -349,8 +368,8 @@ class PostgresRecordManager implements RecordManagerInterface {
         }
     }
 
-    async listKeys(options?: ListKeyOptions): Promise<string[]> {
-        const { before, after, limit, groupIds } = options ?? {}
+    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
+        const { before, after, limit, groupIds, docId } = options ?? {}
         const tableName = this.sanitizeTableName(this.tableName)
 
         let query = `SELECT key FROM "${tableName}" WHERE namespace = $1`

@@ -381,6 +400,12 @@ class PostgresRecordManager implements RecordManagerInterface {
             index += 1
         }
 
+        if (docId) {
+            values.push(docId)
+            query += ` AND doc_id = $${index}`
+            index += 1
+        }
+
         query += ';'
 
         const dataSource = await this.getDataSource()
@@ -51,7 +51,6 @@ class SQLiteRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
                 description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },

@@ -179,8 +178,8 @@ class SQLiteRecordManager implements RecordManagerInterface {
     }
 
     async createSchema(): Promise<void> {
-        const dataSource = await this.getDataSource()
         try {
+            const dataSource = await this.getDataSource()
             const queryRunner = dataSource.createQueryRunner()
             const tableName = this.sanitizeTableName(this.tableName)
 

@@ -198,6 +197,15 @@ CREATE INDEX IF NOT EXISTS key_index ON "${tableName}" (key);
 CREATE INDEX IF NOT EXISTS namespace_index ON "${tableName}" (namespace);
 CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
 
+            // Add doc_id column if it doesn't exist (migration for existing tables)
+            const checkColumn = await queryRunner.manager.query(
+                `SELECT COUNT(*) as count FROM pragma_table_info('${tableName}') WHERE name='doc_id';`
+            )
+            if (checkColumn[0].count === 0) {
+                await queryRunner.manager.query(`ALTER TABLE "${tableName}" ADD COLUMN doc_id TEXT;`)
+                await queryRunner.manager.query(`CREATE INDEX IF NOT EXISTS doc_id_index ON "${tableName}" (doc_id);`)
+            }
+
             await queryRunner.release()
         } catch (e: any) {
             // This error indicates that the table already exists

@@ -208,6 +216,8 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
                 return
             }
             throw e
+        } finally {
+            await dataSource.destroy()
         }
     }
 

@@ -226,7 +236,7 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
         }
     }
 
-    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }

@@ -241,23 +251,23 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }
 
-        const groupIds = _groupIds ?? keys.map(() => null)
+        // Handle both new format (objects with uid and docId) and old format (strings)
+        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
+        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
+        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)
+
+        const groupIds = _groupIds ?? keyStrings.map(() => null)
 
-        if (groupIds.length !== keys.length) {
-            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`)
+        if (groupIds.length !== keyStrings.length) {
+            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids (${groupIds.length})`)
         }
 
-        const recordsToUpsert = keys.map((key, i) => [
-            key,
-            this.namespace,
-            updatedAt,
-            groupIds[i] ?? null // Ensure groupIds[i] is null if undefined
-        ])
+        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i] ?? null, docIds[i] ?? null])
 
         const query = `
-            INSERT INTO "${tableName}" (key, namespace, updated_at, group_id)
-            VALUES (?, ?, ?, ?)
-            ON CONFLICT (key, namespace) DO UPDATE SET updated_at = excluded.updated_at`
+            INSERT INTO "${tableName}" (key, namespace, updated_at, group_id, doc_id)
+            VALUES (?, ?, ?, ?, ?)
+            ON CONFLICT (key, namespace) DO UPDATE SET updated_at = excluded.updated_at, doc_id = excluded.doc_id`
 
         try {
             // To handle multiple files upsert

@@ -312,8 +322,8 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
         }
     }
 
-    async listKeys(options?: ListKeyOptions): Promise<string[]> {
-        const { before, after, limit, groupIds } = options ?? {}
+    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
+        const { before, after, limit, groupIds, docId } = options ?? {}
         const tableName = this.sanitizeTableName(this.tableName)
 
         let query = `SELECT key FROM "${tableName}" WHERE namespace = ?`

@@ -342,6 +352,11 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
             values.push(...groupIds.filter((gid): gid is string => gid !== null))
         }
 
+        if (docId) {
+            query += ` AND doc_id = ?`
+            values.push(docId)
+        }
+
         query += ';'
 
         const dataSource = await this.getDataSource()
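All three record managers now share the same contract: `update` accepts either plain key strings or `{ uid, docId }` objects, and `listKeys` can filter on the new `doc_id` column. A hedged sketch of the intended call pattern; manager construction is omitted and the ids are invented:

    // New format: associate each hashed key with its source document id
    await recordManager.update([
        { uid: 'hash-1', docId: 'doc-abc' },
        { uid: 'hash-2', docId: 'doc-abc' }
    ])

    // Old string[] format still works; doc_id is stored as null
    await recordManager.update(['hash-3'])

    // List only the keys recorded for one document
    const keys = await recordManager.listKeys({ docId: 'doc-abc' })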
@@ -940,9 +940,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom
     const sandbox = createCodeExecutionSandbox(input, variables, flow)
 
     try {
-        const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox, {
-            timeout: 10000
-        })
+        const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox)
 
         if (typeof response !== 'object') throw new Error('Return output must be an object')
         return response
@@ -282,9 +282,7 @@ const runCondition = async (nodeData: INodeData, input: string, options: ICommon
     const sandbox = createCodeExecutionSandbox(input, variables, flow)
 
     try {
-        const response = await executeJavaScriptCode(conditionFunction, sandbox, {
-            timeout: 10000
-        })
+        const response = await executeJavaScriptCode(conditionFunction, sandbox)
 
         if (typeof response !== 'string') throw new Error('Condition function must return a string')
        return response
@@ -549,9 +549,7 @@ const runCondition = async (
     const sandbox = createCodeExecutionSandbox(input, variables, flow)
 
     try {
-        const response = await executeJavaScriptCode(conditionFunction, sandbox, {
-            timeout: 10000
-        })
+        const response = await executeJavaScriptCode(conditionFunction, sandbox)
 
         if (typeof response !== 'string') throw new Error('Condition function must return a string')
         return response
@@ -166,9 +166,7 @@ class CustomFunction_SeqAgents implements INode {
         const sandbox = createCodeExecutionSandbox(input, variables, flow, additionalSandbox)
 
         try {
-            const response = await executeJavaScriptCode(javascriptFunction, sandbox, {
-                timeout: 10000
-            })
+            const response = await executeJavaScriptCode(javascriptFunction, sandbox)
 
             if (returnValueAs === 'stateObj') {
                 if (typeof response !== 'object') {
@@ -264,8 +264,7 @@ class ExecuteFlow_SeqAgents implements INode {
 
         try {
             let response = await executeJavaScriptCode(code, sandbox, {
-                useSandbox: false,
-                timeout: 10000
+                useSandbox: false
             })
 
             if (typeof response === 'object') {
@@ -712,9 +712,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom
     const sandbox = createCodeExecutionSandbox(input, variables, flow)
 
     try {
-        const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox, {
-            timeout: 10000
-        })
+        const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox)
 
         if (typeof response !== 'object') throw new Error('Return output must be an object')
         return response
@@ -204,9 +204,7 @@ class State_SeqAgents implements INode {
         const sandbox = createCodeExecutionSandbox('', variables, flow)
 
         try {
-            const response = await executeJavaScriptCode(`return ${stateMemoryCode}`, sandbox, {
-                timeout: 10000
-            })
+            const response = await executeJavaScriptCode(`return ${stateMemoryCode}`, sandbox)
 
             if (typeof response !== 'object') throw new Error('State must be an object')
             const returnOutput: ISeqAgentNode = {
@@ -575,9 +575,7 @@ const getReturnOutput = async (
     const sandbox = createCodeExecutionSandbox(input, variables, flow)
 
     try {
-        const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox, {
-            timeout: 10000
-        })
+        const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox)
 
         if (typeof response !== 'object') throw new Error('Return output must be an object')
         return response
@@ -238,7 +238,7 @@ export function filterConversationHistory(
 export const restructureMessages = (llm: BaseChatModel, state: ISeqAgentsState) => {
     const messages: BaseMessage[] = []
     for (const message of state.messages as unknown as BaseMessage[]) {
-        // Sometimes Anthropic can return a message with content types of array, ignore that EXECEPT when tool calls are present
+        // Sometimes Anthropic can return a message with content types of array, ignore that EXCEPT when tool calls are present
         if ((message as any).tool_calls?.length && message.content !== '') {
             message.content = JSON.stringify(message.content)
         }
@@ -396,9 +396,7 @@ export const checkMessageHistory = async (
         const sandbox = createCodeExecutionSandbox('', variables, flow)

         try {
-            const response = await executeJavaScriptCode(messageHistory, sandbox, {
-                timeout: 10000
-            })
+            const response = await executeJavaScriptCode(messageHistory, sandbox)

             if (!Array.isArray(response)) throw new Error('Returned message history must be an array')
             if (sysPrompt) {
@@ -4,7 +4,13 @@ import { RunnableConfig } from '@langchain/core/runnables'
 import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
 import { StructuredTool } from '@langchain/core/tools'
 import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
-import { getCredentialData, getCredentialParam, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils'
+import {
+    getCredentialData,
+    getCredentialParam,
+    executeJavaScriptCode,
+    createCodeExecutionSandbox,
+    parseWithTypeConversion
+} from '../../../src/utils'
 import { isValidUUID, isValidURL } from '../../../src/validator'
 import { v4 as uuidv4 } from 'uuid'

@@ -273,7 +279,7 @@ class AgentflowTool extends StructuredTool {
         }
         let parsed
         try {
-            parsed = await this.schema.parseAsync(arg)
+            parsed = await parseWithTypeConversion(this.schema, arg)
         } catch (e) {
             throw new Error(`Received tool input did not match expected schema: ${JSON.stringify(arg)}`)
         }
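Note: `parseWithTypeConversion` is imported from `../../../src/utils`, but its body is not part of this diff. A plausible minimal sketch of the intent (an assumption, not the repo's code) is a coercion pre-pass before strict Zod validation, since LLMs frequently emit every tool argument as a string:

    import { z } from 'zod'

    // Hypothetical sketch: coerce string-encoded numbers/booleans to the
    // types the schema expects, then delegate to Zod's async parse.
    async function parseWithTypeConversionSketch(schema: z.ZodObject<any>, arg: Record<string, any>) {
        const coerced: Record<string, any> = { ...arg }
        for (const [key, def] of Object.entries(schema.shape)) {
            const value = coerced[key]
            if (typeof value !== 'string') continue
            if (def instanceof z.ZodNumber && value.trim() !== '' && !Number.isNaN(Number(value))) {
                coerced[key] = Number(value)
            } else if (def instanceof z.ZodBoolean && (value === 'true' || value === 'false')) {
                coerced[key] = value === 'true'
            }
        }
        return schema.parseAsync(coerced)
    }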
@@ -364,8 +370,7 @@ try {
     const sandbox = createCodeExecutionSandbox('', [], {}, additionalSandbox)

     let response = await executeJavaScriptCode(code, sandbox, {
-        useSandbox: false,
-        timeout: 10000
+        useSandbox: false
     })

     if (typeof response === 'object') {
@@ -4,7 +4,13 @@ import { RunnableConfig } from '@langchain/core/runnables'
 import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
 import { StructuredTool } from '@langchain/core/tools'
 import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
-import { getCredentialData, getCredentialParam, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils'
+import {
+    getCredentialData,
+    getCredentialParam,
+    executeJavaScriptCode,
+    createCodeExecutionSandbox,
+    parseWithTypeConversion
+} from '../../../src/utils'
 import { isValidUUID, isValidURL } from '../../../src/validator'
 import { v4 as uuidv4 } from 'uuid'

@@ -281,7 +287,7 @@ class ChatflowTool extends StructuredTool {
         }
         let parsed
         try {
-            parsed = await this.schema.parseAsync(arg)
+            parsed = await parseWithTypeConversion(this.schema, arg)
         } catch (e) {
             throw new Error(`Received tool input did not match expected schema: ${JSON.stringify(arg)}`)
         }
@@ -372,8 +378,7 @@ try {
     const sandbox = createCodeExecutionSandbox('', [], {}, additionalSandbox)

     let response = await executeJavaScriptCode(code, sandbox, {
-        useSandbox: false,
-        timeout: 10000
+        useSandbox: false
     })

     if (typeof response === 'object') {
@@ -1,5 +1,5 @@
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam, parseWithTypeConversion } from '../../../src/utils'
 import { StructuredTool, ToolInputParsingException, ToolParams } from '@langchain/core/tools'
 import { Sandbox } from '@e2b/code-interpreter'
 import { z } from 'zod'

@@ -159,7 +159,7 @@ export class E2BTool extends StructuredTool {
         }
         let parsed
         try {
-            parsed = await this.schema.parseAsync(arg)
+            parsed = await parseWithTypeConversion(this.schema, arg)
         } catch (e) {
             throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg))
         }
@@ -2,7 +2,7 @@ import { z } from 'zod'
 import { RunnableConfig } from '@langchain/core/runnables'
 import { StructuredTool, ToolParams } from '@langchain/core/tools'
 import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
-import { executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils'
+import { executeJavaScriptCode, createCodeExecutionSandbox, parseWithTypeConversion } from '../../../src/utils'
 import { ICommonObject } from '../../../src/Interface'

 class ToolInputParsingException extends Error {

@@ -68,7 +68,7 @@ export class DynamicStructuredTool<
         }
         let parsed
         try {
-            parsed = await this.schema.parseAsync(arg)
+            parsed = await parseWithTypeConversion(this.schema, arg)
         } catch (e) {
             throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg))
         }

@@ -124,9 +124,7 @@ export class DynamicStructuredTool<

         const sandbox = createCodeExecutionSandbox('', this.variables || [], flow, additionalSandbox)

-        let response = await executeJavaScriptCode(this.code, sandbox, {
-            timeout: 10000
-        })
+        let response = await executeJavaScriptCode(this.code, sandbox)

         if (typeof response === 'object') {
             response = JSON.stringify(response)
@@ -272,6 +272,22 @@ class GoogleCalendar_Tools implements INode {
                 additionalParams: true,
                 optional: true
             },
+            {
+                label: 'Send Updates to',
+                name: 'sendUpdates',
+                type: 'options',
+                description: 'Send Updates to attendees',
+                options: [
+                    { label: 'All', name: 'all' },
+                    { label: 'External Only', name: 'externalOnly' },
+                    { label: 'None', name: 'none' }
+                ],
+                show: {
+                    eventActions: ['createEvent', 'updateEvent']
+                },
+                additionalParams: true,
+                optional: true
+            },
             {
                 label: 'Recurrence Rules',
                 name: 'recurrence',

@@ -560,7 +576,6 @@ class GoogleCalendar_Tools implements INode {
         }
-
         const defaultParams = this.transformNodeInputsToToolArgs(nodeData)

         const tools = createGoogleCalendarTools({
             accessToken,
             actions,

@@ -587,6 +602,7 @@ class GoogleCalendar_Tools implements INode {
         if (nodeData.inputs?.startDate) defaultParams.startDate = nodeData.inputs.startDate
         if (nodeData.inputs?.endDate) defaultParams.endDate = nodeData.inputs.endDate
         if (nodeData.inputs?.attendees) defaultParams.attendees = nodeData.inputs.attendees
+        if (nodeData.inputs?.sendUpdates) defaultParams.sendUpdates = nodeData.inputs.sendUpdates
         if (nodeData.inputs?.recurrence) defaultParams.recurrence = nodeData.inputs.recurrence
         if (nodeData.inputs?.reminderMinutes) defaultParams.reminderMinutes = nodeData.inputs.reminderMinutes
         if (nodeData.inputs?.visibility) defaultParams.visibility = nodeData.inputs.visibility
@@ -48,6 +48,7 @@ const CreateEventSchema = z.object({
     endDate: z.string().optional().describe('End date for all-day events (YYYY-MM-DD)'),
     timeZone: z.string().optional().describe('Time zone (e.g., America/New_York)'),
     attendees: z.string().optional().describe('Comma-separated list of attendee emails'),
+    sendUpdates: z.enum(['all', 'externalOnly', 'none']).optional().default('all').describe('Whether to send notifications to attendees'),
     recurrence: z.string().optional().describe('Recurrence rules (RRULE format)'),
     reminderMinutes: z.number().optional().describe('Minutes before event to send reminder'),
     visibility: z.enum(['default', 'public', 'private', 'confidential']).optional().describe('Event visibility')

@@ -70,6 +71,7 @@ const UpdateEventSchema = z.object({
     endDate: z.string().optional().describe('Updated end date for all-day events (YYYY-MM-DD)'),
     timeZone: z.string().optional().describe('Updated time zone'),
     attendees: z.string().optional().describe('Updated comma-separated list of attendee emails'),
+    sendUpdates: z.enum(['all', 'externalOnly', 'none']).optional().default('all').describe('Whether to send notifications to attendees'),
     recurrence: z.string().optional().describe('Updated recurrence rules'),
     reminderMinutes: z.number().optional().describe('Updated reminder minutes'),
     visibility: z.enum(['default', 'public', 'private', 'confidential']).optional().describe('Updated event visibility')

@@ -286,8 +288,11 @@ class CreateEventTool extends BaseGoogleCalendarTool {
         }

         if (params.visibility) eventData.visibility = params.visibility
+        const queryParams = new URLSearchParams()
+        if (params.sendUpdates) queryParams.append('sendUpdates', params.sendUpdates)
+
+        const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events?${queryParams.toString()}`

-        const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events`
         const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', body: eventData, params })
         return response
     } catch (error) {

@@ -395,8 +400,12 @@ class UpdateEventTool extends BaseGoogleCalendarTool {
         }

         if (params.visibility) updateData.visibility = params.visibility
+        const queryParams = new URLSearchParams()
+        if (params.sendUpdates) queryParams.append('sendUpdates', params.sendUpdates)

-        const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events/${encodeURIComponent(params.eventId)}`
+        const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events/${encodeURIComponent(
+            params.eventId
+        )}?${queryParams.toString()}`
         const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'PUT', body: updateData, params })
         return response
     } catch (error) {
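For concreteness, the create path with `sendUpdates: 'all'` now resolves to a URL like the following (calendar id invented for illustration):

    // Illustrative only: how the appended query string composes.
    const queryParams = new URLSearchParams()
    queryParams.append('sendUpdates', 'all')
    const endpoint = `calendars/${encodeURIComponent('primary')}/events?${queryParams.toString()}`
    // endpoint === 'calendars/primary/events?sendUpdates=all'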
@@ -1,10 +1,9 @@
 import { Tool } from '@langchain/core/tools'
 import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface'
-import { MCPToolkit } from '../core'
-import { getVars, prepareSandboxVars } from '../../../../src/utils'
+import { MCPToolkit, validateMCPServerConfig } from '../core'
+import { getVars, prepareSandboxVars, parseJsonBody } from '../../../../src/utils'
 import { DataSource } from 'typeorm'
 import hash from 'object-hash'
-import JSON5 from 'json5'

 const mcpServerConfig = `{
     "command": "npx",

@@ -75,8 +74,8 @@ class Custom_MCP implements INode {
             },
             placeholder: mcpServerConfig,
             warning:
-                process.env.CUSTOM_MCP_SECURITY_CHECK === 'true'
-                    ? 'In next release, only Remote MCP with url is supported. Read more <a href="https://docs.flowiseai.com/tutorials/tools-and-mcp#streamable-http-recommended" target="_blank">here</a>'
+                process.env.CUSTOM_MCP_PROTOCOL === 'sse'
+                    ? 'Only Remote MCP with url is supported. Read more <a href="https://docs.flowiseai.com/tutorials/tools-and-mcp#streamable-http-recommended" target="_blank">here</a>'
                     : undefined
         },
         {

@@ -137,17 +136,17 @@ class Custom_MCP implements INode {
         }

         let sandbox: ICommonObject = {}
+        const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId

         if (mcpServerConfig.includes('$vars')) {
             const appDataSource = options.appDataSource as DataSource
             const databaseEntities = options.databaseEntities as IDatabaseEntity
-
-            const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
+            // If options.workspaceId is not set, create a new options object with the workspaceId for getVars.
+            const optionsWithWorkspaceId = options.workspaceId ? options : { ...options, workspaceId }
+            const variables = await getVars(appDataSource, databaseEntities, nodeData, optionsWithWorkspaceId)
             sandbox['$vars'] = prepareSandboxVars(variables)
         }

-        const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId
-
         let canonicalConfig
         try {
             canonicalConfig = JSON.parse(mcpServerConfig)

@@ -174,6 +173,14 @@ class Custom_MCP implements INode {
             serverParams = JSON.parse(serverParamsString)
         }

+        if (process.env.CUSTOM_MCP_SECURITY_CHECK !== 'false') {
+            try {
+                validateMCPServerConfig(serverParams)
+            } catch (error) {
+                throw new Error(`Security validation failed: ${error.message}`)
+            }
+        }
+
         // Compatible with stdio and SSE
         let toolkit: MCPToolkit
         if (process.env.CUSTOM_MCP_PROTOCOL === 'sse') {

@@ -262,7 +269,7 @@ function substituteVariablesInString(str: string, sandbox: any): string {

 function convertToValidJSONString(inputString: string) {
     try {
-        const jsObject = JSON5.parse(inputString)
+        const jsObject = parseJsonBody(inputString)
         return JSON.stringify(jsObject, null, 2)
     } catch (error) {
         console.error('Error converting to JSON:', error)
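`parseJsonBody` (imported from `src/utils`) replaces direct `JSON5.parse` here and in the Requests tools below, but its body is not in this diff. A minimal sketch of plausible behavior (an assumption, not the repo's code):

    import JSON5 from 'json5'

    // Hypothetical sketch: accept strict JSON first, fall back to the more
    // permissive JSON5 grammar (single quotes, trailing commas) that
    // hand-written tool configs often use.
    function parseJsonBodySketch(input: string): any {
        try {
            return JSON.parse(input)
        } catch {
            return JSON5.parse(input)
        }
    }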
@@ -1,7 +1,7 @@
 import { Tool } from '@langchain/core/tools'
 import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface'
 import { getNodeModulesPackagePath } from '../../../../src/utils'
-import { MCPToolkit, validateArgsForLocalFileAccess } from '../core'
+import { MCPToolkit, validateMCPServerConfig } from '../core'

 class Supergateway_MCP implements INode {
     label: string

@@ -106,9 +106,9 @@ class Supergateway_MCP implements INode {
             args: [packagePath, ...processedArgs]
         }

-        if (process.env.CUSTOM_MCP_SECURITY_CHECK === 'true') {
+        if (process.env.CUSTOM_MCP_SECURITY_CHECK !== 'false') {
             try {
-                validateArgsForLocalFileAccess(processedArgs)
+                validateMCPServerConfig(serverParams)
             } catch (error) {
                 throw new Error(`Security validation failed: ${error.message}`)
             }
@@ -0,0 +1,147 @@
+import { Tool } from '@langchain/core/tools'
+import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface'
+import { getCredentialData, getCredentialParam } from '../../../../src/utils'
+import { MCPToolkit } from '../core'
+import hash from 'object-hash'
+
+class Teradata_MCP implements INode {
+    label: string
+    name: string
+    version: number
+    description: string
+    type: string
+    icon: string
+    category: string
+    baseClasses: string[]
+    documentation: string
+    credential: INodeParams
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Teradata MCP'
+        this.name = 'teradataMCP'
+        this.version = 1.0
+        this.type = 'Teradata MCP Tool'
+        this.icon = 'teradata.svg'
+        this.category = 'Tools (MCP)'
+        this.description = 'MCP Server for Teradata (remote HTTP streamable)'
+        this.documentation = 'https://github.com/Teradata/teradata-mcp-server'
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['teradataTD2Auth', 'teradataBearerToken'],
+            description: 'Needed when using Teradata MCP server with authentication'
+        }
+        this.inputs = [
+            {
+                label: 'MCP Server URL',
+                name: 'mcpUrl',
+                type: 'string',
+                placeholder: 'http://teradata-mcp-server:8001/mcp',
+                description: 'URL of your Teradata MCP server',
+                optional: false
+            },
+            {
+                label: 'Bearer Token',
+                name: 'bearerToken',
+                type: 'string',
+                optional: true,
+                description: 'Optional to override Default set credentials'
+            },
+            {
+                label: 'Available Actions',
+                name: 'mcpActions',
+                type: 'asyncMultiOptions',
+                loadMethod: 'listActions',
+                refresh: true
+            }
+        ]
+        this.baseClasses = ['Tool']
+    }
+
+    //@ts-ignore
+    loadMethods = {
+        listActions: async (nodeData: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> => {
+            try {
+                const toolset = await this.getTools(nodeData, options)
+                toolset.sort((a: any, b: any) => a.name.localeCompare(b.name))
+                return toolset.map(({ name, ...rest }) => ({
+                    label: name.toUpperCase(),
+                    name: name,
+                    description: rest.description || name
+                }))
+            } catch (error) {
+                console.error('Error listing actions:', error)
+                return [
+                    {
+                        label: 'No Available Actions',
+                        name: 'error',
+                        description: 'No available actions, please check your MCP server URL and credentials, then refresh.'
+                    }
+                ]
+            }
+        }
+    }
+
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const tools = await this.getTools(nodeData, options)
+        const _mcpActions = nodeData.inputs?.mcpActions
+        let mcpActions = []
+        if (_mcpActions) {
+            try {
+                mcpActions = typeof _mcpActions === 'string' ? JSON.parse(_mcpActions) : _mcpActions
+            } catch (error) {
+                console.error('Error parsing mcp actions:', error)
+            }
+        }
+        return tools.filter((tool: any) => mcpActions.includes(tool.name))
+    }
+
+    async getTools(nodeData: INodeData, options: ICommonObject): Promise<Tool[]> {
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const mcpUrl = nodeData.inputs?.mcpUrl || 'http://teradata-mcp-server:8001/mcp'
+        if (!mcpUrl) {
+            throw new Error('Missing MCP Server URL')
+        }
+        // Determine auth method from credentials
+        let serverParams: any = {
+            url: mcpUrl,
+            headers: {}
+        }
+        // Get Bearer token from node input (from agent flow) or credential store
+        const bearerToken = nodeData.inputs?.bearerToken || getCredentialParam('token', credentialData, nodeData)
+        const username = getCredentialParam('tdUsername', credentialData, nodeData)
+        const password = getCredentialParam('tdPassword', credentialData, nodeData)
+
+        if (bearerToken) {
+            serverParams.headers['Authorization'] = `Bearer ${bearerToken}`
+        } else if (username && password) {
+            serverParams.headers['Authorization'] = 'Basic ' + Buffer.from(`${username}:${password}`).toString('base64')
+        } else {
+            throw new Error('Missing credentials: provide Bearer token from flow/credentials OR username/password from credentials')
+        }
+        const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId || 'tdws_default'
+        let sandbox: ICommonObject = {}
+        const cacheKey = hash({ workspaceId, serverParams, sandbox })
+        if (options.cachePool) {
+            const cachedResult = await options.cachePool.getMCPCache(cacheKey)
+            if (cachedResult) {
+                if (cachedResult.tools.length > 0) {
+                    return cachedResult.tools
+                }
+            }
+        }
+
+        // Use SSE for remote HTTP MCP servers
+        const toolkit = new MCPToolkit(serverParams, 'sse')
+        await toolkit.initialize()
+        const tools = toolkit.tools ?? []
+        if (options.cachePool) {
+            await options.cachePool.addMCPCache(cacheKey, { toolkit, tools })
+        }
+        return tools as Tool[]
+    }
+}
+
+module.exports = { nodeClass: Teradata_MCP }
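The auth fallback in `getTools` above collapses to a single Authorization header; for instance (credentials invented for illustration):

    // Illustrative values only.
    const username = 'demo_user'
    const password = 'demo_pass'
    const basic = 'Basic ' + Buffer.from(`${username}:${password}`).toString('base64')
    // basic === 'Basic ZGVtb191c2VyOmRlbW9fcGFzcw=='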
@@ -0,0 +1,19 @@
+<svg width="64" height="64" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g filter="url(#filter0_d_15769_12621)">
+<path d="M49.3232 8H14.6768C13.1984 8 12 9.19843 12 10.6768V45.3232C12 46.8016 13.1984 48 14.6768 48H49.3232C50.8016 48 52 46.8016 52 45.3232V10.6768C52 9.19843 50.8016 8 49.3232 8Z" fill="#FF5F02"/>
+<path d="M25.098 32.467V15.5882H30.1292V20.2286H35.7465V24.6834H30.1292V32.467C30.1292 34.4794 31.1745 35.1364 32.6447 35.1364H35.7391V39.5863H32.6447C27.4915 39.5814 25.098 37.3369 25.098 32.467Z" fill="white"/>
+<path d="M37.8688 37.376C37.8688 36.668 38.1501 35.989 38.6507 35.4884C39.1513 34.9878 39.8303 34.7066 40.5383 34.7066C41.2462 34.7066 41.9252 34.9878 42.4258 35.4884C42.9265 35.989 43.2077 36.668 43.2077 37.376C43.2077 38.084 42.9265 38.7629 42.4258 39.2636C41.9252 39.7642 41.2462 40.0454 40.5383 40.0454C39.8303 40.0454 39.1513 39.7642 38.6507 39.2636C38.1501 38.7629 37.8688 38.084 37.8688 37.376Z" fill="white"/>
+</g>
+<defs>
+<filter id="filter0_d_15769_12621" x="0" y="0" width="64" height="64" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
+<feFlood flood-opacity="0" result="BackgroundImageFix"/>
+<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
+<feOffset dy="4"/>
+<feGaussianBlur stdDeviation="6"/>
+<feComposite in2="hardAlpha" operator="out"/>
+<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
+<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_15769_12621"/>
+<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_15769_12621" result="shape"/>
+</filter>
+</defs>
+</svg>
(new image: 1.6 KiB)
@@ -114,7 +114,7 @@ export class MCPToolkit extends BaseToolkit {
         const res = await Promise.allSettled(toolsPromises)
         const errors = res.filter((r) => r.status === 'rejected')
         if (errors.length !== 0) {
-            console.error('MCP Tools falied to be resolved', errors)
+            console.error('MCP Tools failed to be resolved', errors)
         }
         const successes = res.filter((r) => r.status === 'fulfilled').map((r) => r.value)
         return successes
@@ -219,3 +219,67 @@ export const validateArgsForLocalFileAccess = (args: string[]): void => {
         }
     }
 }
+
+export const validateCommandInjection = (args: string[]): void => {
+    const dangerousPatterns = [
+        // Shell metacharacters
+        /[;&|`$(){}[\]<>]/,
+        // Command chaining
+        /&&|\|\||;;/,
+        // Redirections
+        />>|<<|>/,
+        // Backticks and command substitution
+        /`|\$\(/,
+        // Process substitution
+        /<\(|>\(/
+    ]
+
+    for (const arg of args) {
+        if (typeof arg !== 'string') continue
+
+        for (const pattern of dangerousPatterns) {
+            if (pattern.test(arg)) {
+                throw new Error(`Argument contains potentially dangerous characters: "${arg}"`)
+            }
+        }
+    }
+}
+
+export const validateEnvironmentVariables = (env: Record<string, any>): void => {
+    const dangerousEnvVars = ['PATH', 'LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH']
+
+    for (const [key, value] of Object.entries(env)) {
+        if (dangerousEnvVars.includes(key)) {
+            throw new Error(`Environment variable '${key}' modification is not allowed`)
+        }
+
+        if (typeof value === 'string' && value.includes('\0')) {
+            throw new Error(`Environment variable '${key}' contains null byte`)
+        }
+    }
+}
+
+export const validateMCPServerConfig = (serverParams: any): void => {
+    // Validate the entire server configuration
+    if (!serverParams || typeof serverParams !== 'object') {
+        throw new Error('Invalid server configuration')
+    }
+
+    // Command allowlist - only allow specific safe commands
+    const allowedCommands = ['node', 'npx', 'python', 'python3', 'docker']
+
+    if (serverParams.command && !allowedCommands.includes(serverParams.command)) {
+        throw new Error(`Command '${serverParams.command}' is not allowed. Allowed commands: ${allowedCommands.join(', ')}`)
+    }
+
+    // Validate arguments if present
+    if (serverParams.args && Array.isArray(serverParams.args)) {
+        validateArgsForLocalFileAccess(serverParams.args)
+        validateCommandInjection(serverParams.args)
+    }
+
+    // Validate environment variables
+    if (serverParams.env) {
+        validateEnvironmentVariables(serverParams.env)
+    }
+}
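Taken together, these exports give the MCP nodes one validation entry point. A short usage sketch (the server config values here are invented for illustration):

    // Illustrative config; validateMCPServerConfig is the export added above.
    const serverParams = {
        command: 'npx',
        args: ['-y', 'some-mcp-server'],
        env: { NODE_ENV: 'production' }
    }

    try {
        validateMCPServerConfig(serverParams)
    } catch (error: any) {
        // Throws on a disallowed command, shell metacharacters in args,
        // or attempts to override PATH-style environment variables.
        throw new Error(`Security validation failed: ${error.message}`)
    }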
@@ -5,6 +5,7 @@ import $RefParser from '@apidevtools/json-schema-ref-parser'
 import { z, ZodSchema, ZodTypeAny } from 'zod'
 import { defaultCode, DynamicStructuredTool, howToUseCode } from './core'
 import { DataSource } from 'typeorm'
+import fetch from 'node-fetch'

 class OpenAPIToolkit_Tools implements INode {
     label: string

@@ -21,17 +22,64 @@ class OpenAPIToolkit_Tools implements INode {
     constructor() {
         this.label = 'OpenAPI Toolkit'
         this.name = 'openAPIToolkit'
-        this.version = 2.0
+        this.version = 2.1
         this.type = 'OpenAPIToolkit'
         this.icon = 'openapi.svg'
         this.category = 'Tools'
         this.description = 'Load OpenAPI specification, and converts each API endpoint to a tool'
         this.inputs = [
             {
-                label: 'YAML File',
-                name: 'yamlFile',
+                label: 'Input Type',
+                name: 'inputType',
+                type: 'options',
+                options: [
+                    {
+                        label: 'Upload File',
+                        name: 'file'
+                    },
+                    {
+                        label: 'Provide Link',
+                        name: 'link'
+                    }
+                ],
+                default: 'file',
+                description: 'Choose how to provide the OpenAPI specification'
+            },
+            {
+                label: 'OpenAPI File',
+                name: 'openApiFile',
                 type: 'file',
-                fileType: '.yaml'
+                fileType: '.yaml,.json',
+                description: 'Upload your OpenAPI specification file (YAML or JSON)',
+                show: {
+                    inputType: 'file'
+                }
             },
+            {
+                label: 'OpenAPI Link',
+                name: 'openApiLink',
+                type: 'string',
+                placeholder: 'https://api.example.com/openapi.yaml or https://api.example.com/openapi.json',
+                description: 'Provide a link to your OpenAPI specification (YAML or JSON)',
+                show: {
+                    inputType: 'link'
+                }
+            },
+            {
+                label: 'Server',
+                name: 'selectedServer',
+                type: 'asyncOptions',
+                loadMethod: 'listServers',
+                description: 'Select which server to use for API calls',
+                refresh: true
+            },
+            {
+                label: 'Available Endpoints',
+                name: 'selectedEndpoints',
+                type: 'asyncMultiOptions',
+                loadMethod: 'listEndpoints',
+                description: 'Select which endpoints to expose as tools',
+                refresh: true
+            },
             {
                 label: 'Return Direct',

@@ -46,8 +94,7 @@ class OpenAPIToolkit_Tools implements INode {
                 type: 'json',
                 description: 'Request headers to be sent with the API request. For example, {"Authorization": "Bearer token"}',
                 additionalParams: true,
-                optional: true,
-                acceptVariable: true
+                optional: true
             },
             {
                 label: 'Remove null parameters',

@@ -76,49 +123,237 @@ class OpenAPIToolkit_Tools implements INode {

     async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
         const toolReturnDirect = nodeData.inputs?.returnDirect as boolean
-        const yamlFileBase64 = nodeData.inputs?.yamlFile as string
+        const inputType = nodeData.inputs?.inputType as string
+        const openApiFile = nodeData.inputs?.openApiFile as string
+        const openApiLink = nodeData.inputs?.openApiLink as string
+        const selectedServer = nodeData.inputs?.selectedServer as string
         const customCode = nodeData.inputs?.customCode as string
         const _headers = nodeData.inputs?.headers as string
         const removeNulls = nodeData.inputs?.removeNulls as boolean

         const headers = typeof _headers === 'object' ? _headers : _headers ? JSON.parse(_headers) : {}

-        let data
-        if (yamlFileBase64.startsWith('FILE-STORAGE::')) {
-            const file = yamlFileBase64.replace('FILE-STORAGE::', '')
-            const orgId = options.orgId
-            const chatflowid = options.chatflowid
-            const fileData = await getFileFromStorage(file, orgId, chatflowid)
-            const utf8String = fileData.toString('utf-8')
-            data = load(utf8String)
-        } else {
-            const splitDataURI = yamlFileBase64.split(',')
-            splitDataURI.pop()
-            const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
-            const utf8String = bf.toString('utf-8')
-            data = load(utf8String)
-        }
-        if (!data) {
-            throw new Error('Failed to load OpenAPI spec')
-        }
-
-        const _data: any = await $RefParser.dereference(data)
-
-        const baseUrl = _data.servers[0]?.url
-        if (!baseUrl) {
-            throw new Error('OpenAPI spec does not contain a server URL')
-        }
+        const specData = await this.loadOpenApiSpec(
+            {
+                inputType,
+                openApiFile,
+                openApiLink
+            },
+            options
+        )
+        if (!specData) throw new Error('Failed to load OpenAPI spec')
+
+        const _data: any = await $RefParser.dereference(specData)
+
+        // Use selected server or fallback to first server
+        let baseUrl: string
+        if (selectedServer && selectedServer !== 'error') {
+            baseUrl = selectedServer
+        } else {
+            baseUrl = _data.servers?.[0]?.url
+        }
+        if (!baseUrl) throw new Error('OpenAPI spec does not contain a server URL')

         const appDataSource = options.appDataSource as DataSource
         const databaseEntities = options.databaseEntities as IDatabaseEntity
         const variables = await getVars(appDataSource, databaseEntities, nodeData, options)

         const flow = { chatflowId: options.chatflowid }

-        const tools = getTools(_data.paths, baseUrl, headers, variables, flow, toolReturnDirect, customCode, removeNulls)
+        let tools = getTools(_data.paths, baseUrl, headers, variables, flow, toolReturnDirect, customCode, removeNulls)
+
+        // Filter by selected endpoints if provided
+        const _selected = nodeData.inputs?.selectedEndpoints
+        let selected: string[] = []
+        if (_selected) {
+            try {
+                selected = typeof _selected === 'string' ? JSON.parse(_selected) : _selected
+            } catch (e) {
+                selected = []
+            }
+        }
+        if (selected.length) {
+            tools = tools.filter((t: any) => selected.includes(t.name))
+        }

         return tools
     }
+
+    //@ts-ignore
+    loadMethods = {
+        listServers: async (nodeData: INodeData, options: ICommonObject) => {
+            try {
+                const inputType = nodeData.inputs?.inputType as string
+                const openApiFile = nodeData.inputs?.openApiFile as string
+                const openApiLink = nodeData.inputs?.openApiLink as string
+                const specData: any = await this.loadOpenApiSpec(
+                    {
+                        inputType,
+                        openApiFile,
+                        openApiLink
+                    },
+                    options
+                )
+                if (!specData) return []
+                const _data: any = await $RefParser.dereference(specData)
+                const items: { label: string; name: string; description?: string }[] = []
+                const servers = _data.servers || []
+
+                if (servers.length === 0) {
+                    return [
+                        {
+                            label: 'No Servers Found',
+                            name: 'error',
+                            description: 'No servers defined in the OpenAPI specification'
+                        }
+                    ]
+                }
+
+                for (let i = 0; i < servers.length; i++) {
+                    const server = servers[i]
+                    const serverUrl = server.url || `Server ${i + 1}`
+                    const serverDesc = server.description || serverUrl
+                    items.push({
+                        label: serverUrl,
+                        name: serverUrl,
+                        description: serverDesc
+                    })
+                }
+
+                return items
+            } catch (e) {
+                return [
+                    {
+                        label: 'No Servers Found',
+                        name: 'error',
+                        description: 'No available servers, check the link/file and refresh'
+                    }
+                ]
+            }
+        },
+        listEndpoints: async (nodeData: INodeData, options: ICommonObject) => {
+            try {
+                const inputType = nodeData.inputs?.inputType as string
+                const openApiFile = nodeData.inputs?.openApiFile as string
+                const openApiLink = nodeData.inputs?.openApiLink as string
+                const specData: any = await this.loadOpenApiSpec(
+                    {
+                        inputType,
+                        openApiFile,
+                        openApiLink
+                    },
+                    options
+                )
+                if (!specData) return []
+                const _data: any = await $RefParser.dereference(specData)
+                const items: { label: string; name: string; description?: string }[] = []
+                const paths = _data.paths || {}
+                for (const path in paths) {
+                    const methods = paths[path]
+                    for (const method in methods) {
+                        if (['get', 'post', 'put', 'delete', 'patch'].includes(method)) {
+                            const spec = methods[method]
+                            const opId = spec.operationId || `${method.toUpperCase()} ${path}`
+                            const desc = spec.description || spec.summary || opId
+                            items.push({ label: opId, name: opId, description: desc })
+                        }
+                    }
+                }
+                items.sort((a, b) => a.label.localeCompare(b.label))
+                return items
+            } catch (e) {
+                return [
+                    {
+                        label: 'No Endpoints Found',
+                        name: 'error',
+                        description: 'No available endpoints, check the link/file and refresh'
+                    }
+                ]
+            }
+        }
+    }
+
+    private async loadOpenApiSpec(
+        args: {
+            inputType?: string
+            openApiFile?: string
+            openApiLink?: string
+        },
+        options: ICommonObject
+    ): Promise<any | null> {
+        const { inputType = 'file', openApiFile = '', openApiLink = '' } = args
+        try {
+            if (inputType === 'link' && openApiLink) {
+                const res = await fetch(openApiLink)
+                const text = await res.text()
+
+                // Auto-detect format from URL extension or content
+                const isJsonUrl = openApiLink.toLowerCase().includes('.json')
+                const isYamlUrl = openApiLink.toLowerCase().includes('.yaml') || openApiLink.toLowerCase().includes('.yml')
+
+                if (isJsonUrl) {
+                    return JSON.parse(text)
+                } else if (isYamlUrl) {
+                    return load(text)
+                } else {
+                    // Auto-detect format from content
+                    try {
+                        return JSON.parse(text)
+                    } catch (_) {
+                        return load(text)
+                    }
+                }
+            }
+
+            if (inputType === 'file' && openApiFile) {
+                let utf8String: string
+                let fileName = ''
+
+                if (openApiFile.startsWith('FILE-STORAGE::')) {
+                    const file = openApiFile.replace('FILE-STORAGE::', '')
+                    fileName = file
+                    const orgId = options.orgId
+                    const chatflowid = options.chatflowid
+                    const fileData = await getFileFromStorage(file, orgId, chatflowid)
+                    utf8String = fileData.toString('utf-8')
+                } else {
+                    // Extract filename from data URI if possible
+                    const splitDataURI = openApiFile.split(',')
+                    const mimeType = splitDataURI[0] || ''
+                    if (mimeType.includes('filename=')) {
+                        const filenameMatch = mimeType.match(/filename=([^;]+)/)
+                        if (filenameMatch) {
+                            fileName = filenameMatch[1]
+                        }
+                    }
+                    splitDataURI.pop()
+                    const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
+                    utf8String = bf.toString('utf-8')
+                }
+
+                // Auto-detect format from file extension or content
+                const isJsonFile = fileName.toLowerCase().endsWith('.json')
+                const isYamlFile = fileName.toLowerCase().endsWith('.yaml') || fileName.toLowerCase().endsWith('.yml')
+
+                if (isJsonFile) {
+                    return JSON.parse(utf8String)
+                } else if (isYamlFile) {
+                    return load(utf8String)
+                } else {
+                    // Auto-detect format from content
+                    try {
+                        return JSON.parse(utf8String)
+                    } catch (_) {
+                        return load(utf8String)
+                    }
+                }
+            }
+        } catch (e) {
+            console.error('Error loading OpenAPI spec:', e)
+            return null
+        }
+        return null
+    }
 }

 const jsonSchemaToZodSchema = (schema: any, requiredList: string[], keyName: string): ZodSchema<any> => {
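The format sniffing in `loadOpenApiSpec` means either spec format works from either source; the fallback reduces to the shape below (assuming `load` is js-yaml's, as the surrounding code suggests):

    import { load } from 'js-yaml'

    // Mirrors the fallback above: try strict JSON first, then YAML.
    function sniffSpec(text: string): any {
        try {
            return JSON.parse(text)
        } catch (_) {
            return load(text)
        }
    }

    sniffSpec('{"openapi": "3.0.0"}') // parsed as JSON
    sniffSpec('openapi: 3.0.0') // parsed as YAML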
@@ -3,7 +3,7 @@ import { RequestInit } from 'node-fetch'
 import { RunnableConfig } from '@langchain/core/runnables'
 import { StructuredTool, ToolParams } from '@langchain/core/tools'
 import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
-import { executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils'
+import { executeJavaScriptCode, createCodeExecutionSandbox, parseWithTypeConversion } from '../../../src/utils'
 import { ICommonObject } from '../../../src/Interface'

 const removeNulls = (obj: Record<string, any>) => {

@@ -174,7 +174,7 @@ export class DynamicStructuredTool<
         }
         let parsed
         try {
-            parsed = await this.schema.parseAsync(arg)
+            parsed = await parseWithTypeConversion(this.schema, arg)
         } catch (e) {
             throw new ToolInputParsingException(`Received tool input did not match expected schema ${e}`, JSON.stringify(arg))
         }

@@ -253,9 +253,7 @@ export class DynamicStructuredTool<

         const sandbox = createCodeExecutionSandbox('', this.variables || [], flow, additionalSandbox)

-        let response = await executeJavaScriptCode(this.customCode || defaultCode, sandbox, {
-            timeout: 10000
-        })
+        let response = await executeJavaScriptCode(this.customCode || defaultCode, sandbox)

         if (typeof response === 'object') {
             response = JSON.stringify(response)
@@ -1,85 +0,0 @@
-import { z } from 'zod'
-import { StructuredTool, ToolParams } from '@langchain/core/tools'
-import { Serializable } from '@langchain/core/load/serializable'
-import { NodeFileStore } from 'langchain/stores/file/node'
-import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses } from '../../../src/utils'
-
-abstract class BaseFileStore extends Serializable {
-    abstract readFile(path: string): Promise<string>
-    abstract writeFile(path: string, contents: string): Promise<void>
-}
-
-class ReadFile_Tools implements INode {
-    label: string
-    name: string
-    version: number
-    description: string
-    type: string
-    icon: string
-    category: string
-    baseClasses: string[]
-    inputs: INodeParams[]
-
-    constructor() {
-        this.label = 'Read File'
-        this.name = 'readFile'
-        this.version = 1.0
-        this.type = 'ReadFile'
-        this.icon = 'readfile.svg'
-        this.category = 'Tools'
-        this.description = 'Read file from disk'
-        this.baseClasses = [this.type, 'Tool', ...getBaseClasses(ReadFileTool)]
-        this.inputs = [
-            {
-                label: 'Base Path',
-                name: 'basePath',
-                placeholder: `C:\\Users\\User\\Desktop`,
-                type: 'string',
-                optional: true
-            }
-        ]
-    }
-
-    async init(nodeData: INodeData): Promise<any> {
-        const basePath = nodeData.inputs?.basePath as string
-        const store = basePath ? new NodeFileStore(basePath) : new NodeFileStore()
-        return new ReadFileTool({ store })
-    }
-}
-
-interface ReadFileParams extends ToolParams {
-    store: BaseFileStore
-}
-
-/**
- * Class for reading files from the disk. Extends the StructuredTool
- * class.
- */
-export class ReadFileTool extends StructuredTool {
-    static lc_name() {
-        return 'ReadFileTool'
-    }
-
-    schema = z.object({
-        file_path: z.string().describe('name of file')
-    }) as any
-
-    name = 'read_file'
-
-    description = 'Read file from disk'
-
-    store: BaseFileStore
-
-    constructor({ store }: ReadFileParams) {
-        super(...arguments)
-
-        this.store = store
-    }
-
-    async _call({ file_path }: z.infer<typeof this.schema>) {
-        return await this.store.readFile(file_path)
-    }
-}
-
-module.exports = { nodeClass: ReadFile_Tools }
@@ -1,4 +0,0 @@
-<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M18 5H9C7.89543 5 7 5.89543 7 7V25C7 26.1046 7.89543 27 9 27H12M18 5L25 12M18 5V12H25M25 12V25C25 26.1046 24.1046 27 23 27H20" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M16 17V29M16 17L13 20.1361M16 17L19 20.1361" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
-</svg>
(image removed: 455 B)
@@ -1,7 +1,6 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, stripHTMLFromToolInput } from '../../../src/utils'
+import { getBaseClasses, stripHTMLFromToolInput, parseJsonBody } from '../../../src/utils'
 import { desc, RequestParameters, RequestsDeleteTool } from './core'
-import JSON5 from 'json5'

 const codeExample = `{
     "id": {

@@ -131,7 +130,7 @@ class RequestsDelete_Tools implements INode {
         if (queryParamsSchema) obj.queryParamsSchema = queryParamsSchema
         if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10)
         if (headers) {
-            const parsedHeaders = typeof headers === 'object' ? headers : JSON5.parse(stripHTMLFromToolInput(headers))
+            const parsedHeaders = typeof headers === 'object' ? headers : parseJsonBody(stripHTMLFromToolInput(headers))
             obj.headers = parsedHeaders
         }
@@ -1,7 +1,7 @@
 import { z } from 'zod'
 import { DynamicStructuredTool } from '../OpenAPIToolkit/core'
 import { secureFetch } from '../../../src/httpSecurity'
-import JSON5 from 'json5'
+import { parseJsonBody } from '../../../src/utils'

 export const desc = `Use this when you need to execute a DELETE request to remove data from a website.`

@@ -23,7 +23,7 @@ const createRequestsDeleteSchema = (queryParamsSchema?: string) => {
     // If queryParamsSchema is provided, parse it and add dynamic query params
     if (queryParamsSchema) {
         try {
-            const parsedSchema = JSON5.parse(queryParamsSchema)
+            const parsedSchema = parseJsonBody(queryParamsSchema)
             const queryParamsObject: Record<string, z.ZodTypeAny> = {}

             Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => {

@@ -109,7 +109,7 @@ export class RequestsDeleteTool extends DynamicStructuredTool {

         if (this.queryParamsSchema && params.queryParams && Object.keys(params.queryParams).length > 0) {
             try {
-                const parsedSchema = JSON5.parse(this.queryParamsSchema)
+                const parsedSchema = parseJsonBody(this.queryParamsSchema)
                 const pathParams: Array<{ key: string; value: string }> = []

                 Object.entries(params.queryParams).forEach(([key, value]) => {
@@ -1,7 +1,6 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, stripHTMLFromToolInput } from '../../../src/utils'
+import { getBaseClasses, stripHTMLFromToolInput, parseJsonBody } from '../../../src/utils'
 import { desc, RequestParameters, RequestsGetTool } from './core'
-import JSON5 from 'json5'

 const codeExample = `{
     "id": {

@@ -131,7 +130,7 @@ class RequestsGet_Tools implements INode {
         if (queryParamsSchema) obj.queryParamsSchema = queryParamsSchema
         if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10)
         if (headers) {
-            const parsedHeaders = typeof headers === 'object' ? headers : JSON5.parse(stripHTMLFromToolInput(headers))
+            const parsedHeaders = typeof headers === 'object' ? headers : parseJsonBody(stripHTMLFromToolInput(headers))
             obj.headers = parsedHeaders
         }
@@ -1,7 +1,7 @@
 import { z } from 'zod'
 import { DynamicStructuredTool } from '../OpenAPIToolkit/core'
 import { secureFetch } from '../../../src/httpSecurity'
-import JSON5 from 'json5'
+import { parseJsonBody } from '../../../src/utils'

 export const desc = `Use this when you need to execute a GET request to get data from a website.`

@@ -23,7 +23,7 @@ const createRequestsGetSchema = (queryParamsSchema?: string) => {
     // If queryParamsSchema is provided, parse it and add dynamic query params
     if (queryParamsSchema) {
         try {
-            const parsedSchema = JSON5.parse(queryParamsSchema)
+            const parsedSchema = parseJsonBody(queryParamsSchema)
             const queryParamsObject: Record<string, z.ZodTypeAny> = {}

             Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => {

@@ -109,7 +109,7 @@ export class RequestsGetTool extends DynamicStructuredTool {

         if (this.queryParamsSchema && params.queryParams && Object.keys(params.queryParams).length > 0) {
             try {
-                const parsedSchema = JSON5.parse(this.queryParamsSchema)
+                const parsedSchema = parseJsonBody(this.queryParamsSchema)
                 const pathParams: Array<{ key: string; value: string }> = []

                 Object.entries(params.queryParams).forEach(([key, value]) => {
@@ -1,7 +1,6 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, stripHTMLFromToolInput } from '../../../src/utils'
+import { getBaseClasses, stripHTMLFromToolInput, parseJsonBody } from '../../../src/utils'
 import { RequestParameters, desc, RequestsPostTool } from './core'
-import JSON5 from 'json5'

 const codeExample = `{
     "name": {

@@ -141,11 +140,11 @@ class RequestsPost_Tools implements INode {
         if (bodySchema) obj.bodySchema = stripHTMLFromToolInput(bodySchema)
         if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10)
         if (headers) {
-            const parsedHeaders = typeof headers === 'object' ? headers : JSON5.parse(stripHTMLFromToolInput(headers))
+            const parsedHeaders = typeof headers === 'object' ? headers : parseJsonBody(stripHTMLFromToolInput(headers))
             obj.headers = parsedHeaders
         }
         if (body) {
-            const parsedBody = typeof body === 'object' ? body : JSON5.parse(body)
+            const parsedBody = typeof body === 'object' ? body : parseJsonBody(body)
             obj.body = parsedBody
         }
@@ -1,7 +1,7 @@
 import { z } from 'zod'
 import { DynamicStructuredTool } from '../OpenAPIToolkit/core'
 import { secureFetch } from '../../../src/httpSecurity'
-import JSON5 from 'json5'
+import { parseJsonBody } from '../../../src/utils'

 export const desc = `Use this when you want to execute a POST request to create or update a resource.`

@@ -28,7 +28,7 @@ const createRequestsPostSchema = (bodySchema?: string) => {
     // If bodySchema is provided, parse it and add dynamic body params
     if (bodySchema) {
         try {
-            const parsedSchema = JSON5.parse(bodySchema)
+            const parsedSchema = parseJsonBody(bodySchema)
             const bodyParamsObject: Record<string, z.ZodTypeAny> = {}

             Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => {
@@ -1,7 +1,6 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, stripHTMLFromToolInput } from '../../../src/utils'
+import { getBaseClasses, stripHTMLFromToolInput, parseJsonBody } from '../../../src/utils'
 import { RequestParameters, desc, RequestsPutTool } from './core'
-import JSON5 from 'json5'

 const codeExample = `{
     "name": {

@@ -141,11 +140,11 @@ class RequestsPut_Tools implements INode {
         if (bodySchema) obj.bodySchema = stripHTMLFromToolInput(bodySchema)
         if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10)
         if (headers) {
-            const parsedHeaders = typeof headers === 'object' ? headers : JSON5.parse(stripHTMLFromToolInput(headers))
+            const parsedHeaders = typeof headers === 'object' ? headers : parseJsonBody(stripHTMLFromToolInput(headers))
             obj.headers = parsedHeaders
         }
         if (body) {
-            const parsedBody = typeof body === 'object' ? body : JSON5.parse(body)
+            const parsedBody = typeof body === 'object' ? body : parseJsonBody(body)
             obj.body = parsedBody
         }
@@ -1,7 +1,7 @@
 import { z } from 'zod'
 import { DynamicStructuredTool } from '../OpenAPIToolkit/core'
 import { secureFetch } from '../../../src/httpSecurity'
-import JSON5 from 'json5'
+import { parseJsonBody } from '../../../src/utils'

 export const desc = `Use this when you want to execute a PUT request to update or replace a resource.`

@@ -28,7 +28,7 @@ const createRequestsPutSchema = (bodySchema?: string) => {
     // If bodySchema is provided, parse it and add dynamic body params
     if (bodySchema) {
         try {
-            const parsedSchema = JSON5.parse(bodySchema)
+            const parsedSchema = parseJsonBody(bodySchema)
             const bodyParamsObject: Record<string, z.ZodTypeAny> = {}

             Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => {
@@ -3,7 +3,7 @@ import { CallbackManager, CallbackManagerForToolRun, Callbacks, parseCallbackCon
 import { BaseDynamicToolInput, DynamicTool, StructuredTool, ToolInputParsingException } from '@langchain/core/tools'
 import { BaseRetriever } from '@langchain/core/retrievers'
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, resolveFlowObjValue } from '../../../src/utils'
+import { getBaseClasses, resolveFlowObjValue, parseWithTypeConversion } from '../../../src/utils'
 import { SOURCE_DOCUMENTS_PREFIX } from '../../../src/agents'
 import { RunnableConfig } from '@langchain/core/runnables'
 import { VectorStoreRetriever } from '@langchain/core/vectorstores'

@@ -58,7 +58,7 @@ class DynamicStructuredTool<T extends z.ZodObject<any, any, any, any> = z.ZodObj
         }
         let parsed
         try {
-            parsed = await this.schema.parseAsync(arg)
+            parsed = await parseWithTypeConversion(this.schema, arg)
         } catch (e) {
             throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg))
         }
@@ -1,87 +0,0 @@
-import { z } from 'zod'
-import { StructuredTool, ToolParams } from '@langchain/core/tools'
-import { Serializable } from '@langchain/core/load/serializable'
-import { NodeFileStore } from 'langchain/stores/file/node'
-import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses } from '../../../src/utils'
-
-abstract class BaseFileStore extends Serializable {
-    abstract readFile(path: string): Promise<string>
-    abstract writeFile(path: string, contents: string): Promise<void>
-}
-
-class WriteFile_Tools implements INode {
-    label: string
-    name: string
-    version: number
-    description: string
-    type: string
-    icon: string
-    category: string
-    baseClasses: string[]
-    inputs: INodeParams[]
-
-    constructor() {
-        this.label = 'Write File'
-        this.name = 'writeFile'
-        this.version = 1.0
-        this.type = 'WriteFile'
-        this.icon = 'writefile.svg'
-        this.category = 'Tools'
-        this.description = 'Write file to disk'
-        this.baseClasses = [this.type, 'Tool', ...getBaseClasses(WriteFileTool)]
-        this.inputs = [
-            {
-                label: 'Base Path',
-                name: 'basePath',
-                placeholder: `C:\\Users\\User\\Desktop`,
-                type: 'string',
-                optional: true
-            }
-        ]
-    }
-
-    async init(nodeData: INodeData): Promise<any> {
-        const basePath = nodeData.inputs?.basePath as string
-        const store = basePath ? new NodeFileStore(basePath) : new NodeFileStore()
-        return new WriteFileTool({ store })
-    }
-}
-
-interface WriteFileParams extends ToolParams {
-    store: BaseFileStore
-}
-
-/**
- * Class for writing data to files on the disk. Extends the StructuredTool
- * class.
- */
-export class WriteFileTool extends StructuredTool {
-    static lc_name() {
-        return 'WriteFileTool'
-    }
-
-    schema = z.object({
-        file_path: z.string().describe('name of file'),
-        text: z.string().describe('text to write to file')
-    }) as any
-
-    name = 'write_file'
-
-    description = 'Write file from disk'
-
-    store: BaseFileStore
-
-    constructor({ store, ...rest }: WriteFileParams) {
-        super(rest)
-
-        this.store = store
-    }
-
-    async _call({ file_path, text }: z.infer<typeof this.schema>) {
-        await this.store.writeFile(file_path, text)
-        return 'File written to successfully.'
-    }
-}
-
-module.exports = { nodeClass: WriteFile_Tools }
@@ -1,4 +0,0 @@
-<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M25 18V25C25 26.1046 24.1046 27 23 27H9C7.89543 27 7 26.1046 7 25V7C7 5.89543 7.89543 5 9 5H18L19 6" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M12 19.3284V22H14.6716C15.202 22 15.7107 21.7893 16.0858 21.4142L24.5858 12.9142C25.3668 12.1332 25.3668 10.8668 24.5858 10.0858L23.9142 9.41421C23.1332 8.63316 21.8668 8.63317 21.0858 9.41421L12.5858 17.9142C12.2107 18.2893 12 18.798 12 19.3284Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
-</svg>
(image removed: 632 B)
@@ -84,11 +84,16 @@ class CustomFunction_Utilities implements INode {

         const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
         const flow = {
+            input,
             chatflowId: options.chatflowid,
             sessionId: options.sessionId,
             chatId: options.chatId,
-            rawOutput: options.rawOutput || '',
-            input
+            rawOutput: options.postProcessing?.rawOutput || '',
+            chatHistory: options.postProcessing?.chatHistory || [],
+            sourceDocuments: options.postProcessing?.sourceDocuments,
+            usedTools: options.postProcessing?.usedTools,
+            artifacts: options.postProcessing?.artifacts,
+            fileAnnotations: options.postProcessing?.fileAnnotations
         }

         let inputVars: ICommonObject = {}

@@ -132,9 +137,7 @@ class CustomFunction_Utilities implements INode {
         const sandbox = createCodeExecutionSandbox(input, variables, flow, additionalSandbox)

         try {
-            const response = await executeJavaScriptCode(javascriptFunction, sandbox, {
-                timeout: 10000
-            })
+            const response = await executeJavaScriptCode(javascriptFunction, sandbox)

             if (typeof response === 'string' && !isEndingNode) {
                 return handleEscapeCharacters(response, false)
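With the enriched `flow` object, a post-processing Custom Function body can read the run's outputs through the sandbox's `$flow` variable (a sketch; field availability depends on the run, and `$flow` follows the node's existing sandbox convention rather than anything shown in this diff):

    // Sketch of a Custom Function body; $flow is injected by the sandbox.
    const summary = {
        answer: $flow.rawOutput,
        turns: ($flow.chatHistory || []).length,
        citedSources: ($flow.sourceDocuments || []).length
    }
    return JSON.stringify(summary)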
Some files were not shown because too many files have changed in this diff.