Compare commits

1 commit: main ... chore/Leav

| Author | SHA1 | Date |
| --- | --- | --- |
|  | c10d91725e |  |
========================================

@@ -1,72 +0,0 @@
-name: Docker Image CI - Docker Hub
-
-on:
-    workflow_dispatch:
-        inputs:
-            node_version:
-                description: 'Node.js version to build this image with.'
-                type: choice
-                required: true
-                default: '20'
-                options:
-                    - '20'
-            tag_version:
-                description: 'Tag version of the image to be pushed.'
-                type: string
-                required: true
-                default: 'latest'
-
-jobs:
-    docker:
-        runs-on: ubuntu-latest
-        steps:
-            - name: Set default values
-              id: defaults
-              run: |
-                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
-                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT
-
-            - name: Checkout
-              uses: actions/checkout@v4.1.1
-
-            - name: Set up QEMU
-              uses: docker/setup-qemu-action@v3.0.0
-
-            - name: Set up Docker Buildx
-              uses: docker/setup-buildx-action@v3.0.0
-
-            - name: Login to Docker Hub
-              uses: docker/login-action@v3
-              with:
-                  username: ${{ secrets.DOCKERHUB_USERNAME }}
-                  password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-            # -------------------------
-            # Build and push main image
-            # -------------------------
-            - name: Build and push main image
-              uses: docker/build-push-action@v5.3.0
-              with:
-                  context: .
-                  file: ./docker/Dockerfile
-                  build-args: |
-                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
-                  platforms: linux/amd64,linux/arm64
-                  push: true
-                  tags: |
-                      flowiseai/flowise:${{ steps.defaults.outputs.tag_version }}
-
-            # -------------------------
-            # Build and push worker image
-            # -------------------------
-            - name: Build and push worker image
-              uses: docker/build-push-action@v5.3.0
-              with:
-                  context: .
-                  file: docker/worker/Dockerfile
-                  build-args: |
-                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
-                  platforms: linux/amd64,linux/arm64
-                  push: true
-                  tags: |
-                      flowiseai/flowise-worker:${{ steps.defaults.outputs.tag_version }}
========================================

@@ -1,73 +0,0 @@
-name: Docker Image CI - AWS ECR
-
-on:
-    workflow_dispatch:
-        inputs:
-            environment:
-                description: 'Environment to push the image to.'
-                required: true
-                default: 'dev'
-                type: choice
-                options:
-                    - dev
-                    - prod
-            node_version:
-                description: 'Node.js version to build this image with.'
-                type: choice
-                required: true
-                default: '20'
-                options:
-                    - '20'
-            tag_version:
-                description: 'Tag version of the image to be pushed.'
-                type: string
-                required: true
-                default: 'latest'
-
-jobs:
-    docker:
-        runs-on: ubuntu-latest
-        environment: ${{ github.event.inputs.environment }}
-        steps:
-            - name: Set default values
-              id: defaults
-              run: |
-                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
-                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT
-
-            - name: Checkout
-              uses: actions/checkout@v4.1.1
-
-            - name: Set up QEMU
-              uses: docker/setup-qemu-action@v3.0.0
-
-            - name: Set up Docker Buildx
-              uses: docker/setup-buildx-action@v3.0.0
-
-            - name: Configure AWS Credentials
-              uses: aws-actions/configure-aws-credentials@v3
-              with:
-                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-                  aws-region: ${{ secrets.AWS_REGION }}
-
-            - name: Login to Amazon ECR
-              uses: aws-actions/amazon-ecr-login@v1
-
-            # -------------------------
-            # Build and push main image
-            # -------------------------
-            - name: Build and push main image
-              uses: docker/build-push-action@v5.3.0
-              with:
-                  context: .
-                  file: Dockerfile
-                  build-args: |
-                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
-                  platforms: linux/amd64,linux/arm64
-                  push: true
-                  tags: |
-                      ${{ format('{0}.dkr.ecr.{1}.amazonaws.com/flowise:{2}',
-                          secrets.AWS_ACCOUNT_ID,
-                          secrets.AWS_REGION,
-                          steps.defaults.outputs.tag_version) }}
========================================

@@ -0,0 +1,114 @@
+name: Docker Image CI
+
+on:
+    workflow_dispatch:
+        inputs:
+            registry:
+                description: 'Container Registry to push the image to.'
+                type: choice
+                required: true
+                default: 'aws_ecr'
+                options:
+                    - 'docker_hub'
+                    - 'aws_ecr'
+            environment:
+                description: 'Environment to push the image to.'
+                required: true
+                default: 'dev'
+                type: choice
+                options:
+                    - dev
+                    - prod
+            image_type:
+                description: 'Type of image to build and push.'
+                type: choice
+                required: true
+                default: 'main'
+                options:
+                    - 'main'
+                    - 'worker'
+            node_version:
+                description: 'Node.js version to build this image with.'
+                type: choice
+                required: true
+                default: '20'
+                options:
+                    - '20'
+            tag_version:
+                description: 'Tag version of the image to be pushed.'
+                type: string
+                required: true
+                default: 'latest'
+
+jobs:
+    docker:
+        runs-on: ubuntu-latest
+        environment: ${{ github.event.inputs.environment }}
+        steps:
+            - name: Set default values
+              id: defaults
+              run: |
+                  echo "registry=${{ github.event.inputs.registry || 'aws_ecr' }}" >> $GITHUB_OUTPUT
+                  echo "image_type=${{ github.event.inputs.image_type || 'main' }}" >> $GITHUB_OUTPUT
+                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
+                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT
+
+            - name: Checkout
+              uses: actions/checkout@v4.1.1
+
+            - name: Set up QEMU
+              uses: docker/setup-qemu-action@v3.0.0
+
+            - name: Set up Docker Buildx
+              uses: docker/setup-buildx-action@v3.0.0
+
+            # ------------------------
+            # Login Steps (conditional)
+            # ------------------------
+            - name: Login to Docker Hub
+              if: steps.defaults.outputs.registry == 'docker_hub'
+              uses: docker/login-action@v3
+              with:
+                  username: ${{ secrets.DOCKERHUB_USERNAME }}
+                  password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+            - name: Configure AWS Credentials
+              if: steps.defaults.outputs.registry == 'aws_ecr'
+              uses: aws-actions/configure-aws-credentials@v3
+              with:
+                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+                  aws-region: ${{ secrets.AWS_REGION }}
+
+            - name: Login to Amazon ECR
+              if: steps.defaults.outputs.registry == 'aws_ecr'
+              uses: aws-actions/amazon-ecr-login@v1
+
+            # -------------------------
+            # Build and push (conditional tags)
+            # -------------------------
+            - name: Build and push
+              uses: docker/build-push-action@v5.3.0
+              with:
+                  context: .
+                  file: |
+                      ${{
+                          steps.defaults.outputs.image_type == 'worker' && 'docker/worker/Dockerfile' ||
+                          (steps.defaults.outputs.registry == 'docker_hub' && './docker/Dockerfile' || 'Dockerfile')
+                      }}
+                  build-args: |
+                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
+                  platforms: linux/amd64,linux/arm64
+                  push: true
+                  tags: |
+                      ${{
+                          steps.defaults.outputs.registry == 'docker_hub' &&
+                          format('flowiseai/flowise{0}:{1}',
+                              steps.defaults.outputs.image_type == 'worker' && '-worker' || '',
+                              steps.defaults.outputs.tag_version) ||
+                          format('{0}.dkr.ecr.{1}.amazonaws.com/flowise{2}:{3}',
+                              secrets.AWS_ACCOUNT_ID,
+                              secrets.AWS_REGION,
+                              steps.defaults.outputs.image_type == 'worker' && '-worker' || '',
+                              steps.defaults.outputs.tag_version)
+                      }}
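The `file` and `tags` fields in the new workflow lean on GitHub Actions expressions, which emulate a ternary via JavaScript-style short-circuit evaluation (`cond && a || b`). For reference, a minimal TypeScript sketch of the same selection logic; the function and parameter names are illustrative, not part of the workflow:

```typescript
// Mirrors the workflow's `tags` expression: choose a Docker Hub or
// AWS ECR image reference from the selected registry and image type.
// `accountId` and `region` stand in for the AWS_ACCOUNT_ID and
// AWS_REGION secrets.
function resolveImageTag(
    registry: 'docker_hub' | 'aws_ecr',
    imageType: 'main' | 'worker',
    tagVersion: string,
    accountId: string,
    region: string
): string {
    // Equivalent of `image_type == 'worker' && '-worker' || ''`
    const suffix = imageType === 'worker' ? '-worker' : ''
    return registry === 'docker_hub'
        ? `flowiseai/flowise${suffix}:${tagVersion}`
        : `${accountId}.dkr.ecr.${region}.amazonaws.com/flowise${suffix}:${tagVersion}`
}

// resolveImageTag('aws_ecr', 'worker', 'latest', '123456789012', 'us-east-1')
// -> '123456789012.dkr.ecr.us-east-1.amazonaws.com/flowise-worker:latest'
```

One pitfall of the `&& ... ||` idiom: when the "true" branch is itself falsy (an empty string, for instance), the `||` side wins instead, which is why the empty suffix sits on the `||` side of the expression.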
========================================

@@ -114,7 +114,7 @@ Flowise has 3 different modules in a single mono repository.
 to make sure everything works fine in production.

-11. Commit code and submit Pull Request from forked branch pointing to [Flowise main](https://github.com/FlowiseAI/Flowise/tree/main).
+11. Commit code and submit Pull Request from forked branch pointing to [Flowise master](https://github.com/FlowiseAI/Flowise/tree/master).

 ## 🌱 Env Variables
========================================

Dockerfile (39 changes)

@@ -5,41 +5,34 @@
 # docker run -d -p 3000:3000 flowise

 FROM node:20-alpine
+RUN apk add --update libc6-compat python3 make g++
+# needed for pdfjs-dist
+RUN apk add --no-cache build-base cairo-dev pango-dev

-# Install system dependencies and build tools
-RUN apk update && \
-    apk add --no-cache \
-    libc6-compat \
-    python3 \
-    make \
-    g++ \
-    build-base \
-    cairo-dev \
-    pango-dev \
-    chromium \
-    curl && \
-    npm install -g pnpm
+# Install Chromium
+RUN apk add --no-cache chromium
+
+# Install curl for container-level health checks
+# Fixes: https://github.com/FlowiseAI/Flowise/issues/4126
+RUN apk add --no-cache curl
+
+#install PNPM globaly
+RUN npm install -g pnpm

 ENV PUPPETEER_SKIP_DOWNLOAD=true
 ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser

 ENV NODE_OPTIONS=--max-old-space-size=8192

-WORKDIR /usr/src/flowise
+WORKDIR /usr/src

 # Copy app source
 COPY . .

-# Install dependencies and build
-RUN pnpm install && \
-    pnpm build
-
-# Give the node user ownership of the application files
-RUN chown -R node:node .
-
-# Switch to non-root user (node user already exists in node:20-alpine)
-USER node
+RUN pnpm install
+
+RUN pnpm build

 EXPOSE 3000

 CMD [ "pnpm", "start" ]
========================================

@@ -190,10 +190,6 @@ Deploy Flowise self-hosted in your existing infrastructure, we support various [
 [](https://railway.app/template/pn4G8S?referralCode=WVNPD9)

-- [Northflank](https://northflank.com/stacks/deploy-flowiseai)
-
-    [](https://northflank.com/stacks/deploy-flowiseai)
-
 - [Render](https://docs.flowiseai.com/configuration/deployment/render)

     [](https://docs.flowiseai.com/configuration/deployment/render)
========================================

SECURITY.md (58 changes)

@@ -1,38 +1,40 @@
 ### Responsible Disclosure Policy

 At Flowise, we prioritize security and continuously work to safeguard our systems. However, vulnerabilities can still exist. If you identify a security issue, please report it to us so we can address it promptly. Your cooperation helps us better protect our platform and users.

-### Out of scope vulnerabilities
+### Vulnerabilities
+
+The following types of issues are some of the most common vulnerabilities:

 - Clickjacking on pages without sensitive actions
 - CSRF on unauthenticated/logout/login pages
 - Attacks requiring MITM (Man-in-the-Middle) or physical device access
 - Social engineering attacks
 - Activities that cause service disruption (DoS)
 - Content spoofing and text injection without a valid attack vector
 - Email spoofing
 - Absence of DNSSEC, CAA, CSP headers
 - Missing Secure or HTTP-only flag on non-sensitive cookies
 - Deadlinks
 - User enumeration

 ### Reporting Guidelines

 - Submit your findings to https://github.com/FlowiseAI/Flowise/security
 - Provide clear details to help us reproduce and fix the issue quickly.

 ### Disclosure Guidelines

 - Do not publicly disclose vulnerabilities until we have assessed, resolved, and notified affected users.
 - If you plan to present your research (e.g., at a conference or in a blog), share a draft with us at least **30 days in advance** for review.
 - Avoid including:
     - Data from any Flowise customer projects
     - Flowise user/customer information
     - Details about Flowise employees, contractors, or partners

 ### Response to Reports

 - We will acknowledge your report within **5 business days** and provide an estimated resolution timeline.
 - Your report will be kept **confidential**, and your details will not be shared without your consent.

 We appreciate your efforts in helping us maintain a secure platform and look forward to working together to resolve any issues responsibly.
========================================

@@ -38,8 +38,6 @@ SECRETKEY_PATH=/root/.flowise
 # DEBUG=true
 LOG_PATH=/root/.flowise/logs
 # LOG_LEVEL=info #(error | warn | info | verbose | debug)
-# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials
-# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie
 # TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
 # TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
 # ALLOW_BUILTIN_DEP=false

@@ -101,7 +99,6 @@ JWT_TOKEN_EXPIRY_IN_MINUTES=360
 JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
 # EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart)
 # EXPRESS_SESSION_SECRET=flowise
-# SECURE_COOKIES=

 # INVITE_TOKEN_EXPIRY_IN_HOURS=24
 # PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15
========================================

@@ -46,8 +46,6 @@ services:
     - DEBUG=${DEBUG}
     - LOG_PATH=${LOG_PATH}
     - LOG_LEVEL=${LOG_LEVEL}
-    - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
-    - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

     # CUSTOM TOOL/FUNCTION DEPENDENCIES
     - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}

@@ -91,7 +89,6 @@ services:
     - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
     - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
     - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
-    - SECURE_COOKIES=${SECURE_COOKIES}

     # EMAIL
     - SMTP_HOST=${SMTP_HOST}

@@ -192,8 +189,6 @@ services:
     - DEBUG=${DEBUG}
     - LOG_PATH=${LOG_PATH}
     - LOG_LEVEL=${LOG_LEVEL}
-    - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
-    - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

     # CUSTOM TOOL/FUNCTION DEPENDENCIES
     - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}

@@ -237,7 +232,6 @@ services:
     - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
    - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
     - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
-    - SECURE_COOKIES=${SECURE_COOKIES}

     # EMAIL
     - SMTP_HOST=${SMTP_HOST}
========================================

@@ -31,8 +31,6 @@ services:
     - DEBUG=${DEBUG}
     - LOG_PATH=${LOG_PATH}
     - LOG_LEVEL=${LOG_LEVEL}
-    - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
-    - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

     # CUSTOM TOOL/FUNCTION DEPENDENCIES
     - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}

@@ -76,7 +74,6 @@ services:
     - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
     - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
     - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
-    - SECURE_COOKIES=${SECURE_COOKIES}

     # EMAIL
     - SMTP_HOST=${SMTP_HOST}
========================================

@@ -38,8 +38,6 @@ SECRETKEY_PATH=/root/.flowise
 # DEBUG=true
 LOG_PATH=/root/.flowise/logs
 # LOG_LEVEL=info #(error | warn | info | verbose | debug)
-# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials
-# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie
 # TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
 # TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
 # ALLOW_BUILTIN_DEP=false

@@ -101,7 +99,6 @@ JWT_TOKEN_EXPIRY_IN_MINUTES=360
 JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
 # EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart)
 # EXPRESS_SESSION_SECRET=flowise
-# SECURE_COOKIES=

 # INVITE_TOKEN_EXPIRY_IN_HOURS=24
 # PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15
========================================

@@ -7,7 +7,7 @@ RUN apk add --no-cache build-base cairo-dev pango-dev
 # Install Chromium and curl for container-level health checks
 RUN apk add --no-cache chromium curl

-#install PNPM globally
+#install PNPM globaly
 RUN npm install -g pnpm

 ENV PUPPETEER_SKIP_DOWNLOAD=true
========================================

@@ -31,8 +31,6 @@ services:
     - DEBUG=${DEBUG}
     - LOG_PATH=${LOG_PATH}
     - LOG_LEVEL=${LOG_LEVEL}
-    - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
-    - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

     # CUSTOM TOOL/FUNCTION DEPENDENCIES
     - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}

@@ -76,7 +74,6 @@ services:
     - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
     - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
     - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
-    - SECURE_COOKIES=${SECURE_COOKIES}

     # EMAIL
     - SMTP_HOST=${SMTP_HOST}
========================================

@@ -112,7 +112,7 @@ Flowise has 3 different modules in a single mono repository. (translated from the Chinese contributing guide)
     pnpm start
     ```

-11. Commit code and submit a Pull Request from your forked branch, pointing at the [Flowise main branch](https://github.com/FlowiseAI/Flowise/tree/main).
+11. Commit code and submit a Pull Request from your forked branch, pointing at the [Flowise master branch](https://github.com/FlowiseAI/Flowise/tree/master).

 ## 🌱 Env Variables
========================================

@@ -1,7 +1,6 @@
 version: "2"
 services:
     otel-collector:
-        read_only: true
         image: otel/opentelemetry-collector-contrib
         command: ["--config=/etc/otelcol-contrib/config.yaml", "--feature-gates=-exporter.datadogexporter.DisableAPMStats", "${OTELCOL_ARGS}"]
         volumes:
========================================

@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "3.0.11",
+    "version": "3.0.7",
     "private": true,
     "homepage": "https://flowiseai.com",
     "workspaces": [

@@ -51,7 +51,7 @@
     "eslint-plugin-react-hooks": "^4.6.0",
     "eslint-plugin-unused-imports": "^2.0.0",
     "husky": "^8.0.1",
-    "kill-port": "2.0.1",
+    "kill-port": "^2.0.1",
     "lint-staged": "^13.0.3",
     "prettier": "^2.7.1",
     "pretty-quick": "^3.1.3",
========================================

@@ -1,26 +0,0 @@
-import { INodeParams, INodeCredential } from '../src/Interface'
-
-class ElevenLabsApi implements INodeCredential {
-    label: string
-    name: string
-    version: number
-    description: string
-    inputs: INodeParams[]
-
-    constructor() {
-        this.label = 'Eleven Labs API'
-        this.name = 'elevenLabsApi'
-        this.version = 1.0
-        this.description =
-            'Sign up for a Eleven Labs account and <a target="_blank" href="https://elevenlabs.io/app/settings/api-keys">create an API Key</a>.'
-        this.inputs = [
-            {
-                label: 'Eleven Labs API Key',
-                name: 'elevenLabsApiKey',
-                type: 'password'
-            }
-        ]
-    }
-}
-
-module.exports = { credClass: ElevenLabsApi }
========================================

@@ -1,47 +0,0 @@
-import { INodeParams, INodeCredential } from '../src/Interface'
-
-class TeradataVectorStoreApiCredentials implements INodeCredential {
-    label: string
-    name: string
-    version: number
-    inputs: INodeParams[]
-
-    constructor() {
-        this.label = 'Teradata Vector Store API Credentials'
-        this.name = 'teradataVectorStoreApiCredentials'
-        this.version = 1.0
-        this.inputs = [
-            {
-                label: 'Teradata Host IP',
-                name: 'tdHostIp',
-                type: 'string'
-            },
-            {
-                label: 'Username',
-                name: 'tdUsername',
-                type: 'string'
-            },
-            {
-                label: 'Password',
-                name: 'tdPassword',
-                type: 'password'
-            },
-            {
-                label: 'Vector_Store_Base_URL',
-                name: 'baseURL',
-                description: 'Teradata Vector Store Base URL',
-                placeholder: `Base_URL`,
-                type: 'string'
-            },
-            {
-                label: 'JWT Token',
-                name: 'jwtToken',
-                type: 'password',
-                description: 'Bearer token for JWT authentication',
-                optional: true
-            }
-        ]
-    }
-}
-
-module.exports = { credClass: TeradataVectorStoreApiCredentials }
========================================

@@ -3,27 +3,6 @@
     {
         "name": "awsChatBedrock",
        "models": [
-            {
-                "label": "anthropic.claude-opus-4-5-20251101-v1:0",
-                "name": "anthropic.claude-opus-4-5-20251101-v1:0",
-                "description": "Claude 4.5 Opus",
-                "input_cost": 0.000005,
-                "output_cost": 0.000025
-            },
-            {
-                "label": "anthropic.claude-sonnet-4-5-20250929-v1:0",
-                "name": "anthropic.claude-sonnet-4-5-20250929-v1:0",
-                "description": "Claude 4.5 Sonnet",
-                "input_cost": 0.000003,
-                "output_cost": 0.000015
-            },
-            {
-                "label": "anthropic.claude-haiku-4-5-20251001-v1:0",
-                "name": "anthropic.claude-haiku-4-5-20251001-v1:0",
-                "description": "Claude 4.5 Haiku",
-                "input_cost": 0.000001,
-                "output_cost": 0.000005
-            },
             {
                 "label": "openai.gpt-oss-20b-1:0",
                 "name": "openai.gpt-oss-20b-1:0",

@@ -322,12 +301,6 @@
     {
         "name": "azureChatOpenAI",
         "models": [
-            {
-                "label": "gpt-5.1",
-                "name": "gpt-5.1",
-                "input_cost": 0.00000125,
-                "output_cost": 0.00001
-            },
             {
                 "label": "gpt-5",
                 "name": "gpt-5",

@@ -513,25 +486,11 @@
         "name": "chatAnthropic",
         "models": [
             {
-                "label": "claude-opus-4-5",
-                "name": "claude-opus-4-5",
-                "description": "Claude 4.5 Opus",
-                "input_cost": 0.000005,
-                "output_cost": 0.000025
-            },
-            {
-                "label": "claude-sonnet-4-5",
-                "name": "claude-sonnet-4-5",
-                "description": "Claude 4.5 Sonnet",
-                "input_cost": 0.000003,
-                "output_cost": 0.000015
-            },
-            {
-                "label": "claude-haiku-4-5",
-                "name": "claude-haiku-4-5",
-                "description": "Claude 4.5 Haiku",
-                "input_cost": 0.000001,
-                "output_cost": 0.000005
+                "label": "claude-opus-4-1",
+                "name": "claude-opus-4-1",
+                "description": "Claude 4.1 Opus",
+                "input_cost": 0.000015,
+                "output_cost": 0.000075
             },
             {
                 "label": "claude-sonnet-4-0",

@@ -540,13 +499,6 @@
                 "input_cost": 0.000003,
                 "output_cost": 0.000015
             },
-            {
-                "label": "claude-opus-4-1",
-                "name": "claude-opus-4-1",
-                "description": "Claude 4.1 Opus",
-                "input_cost": 0.000015,
-                "output_cost": 0.000075
-            },
             {
                 "label": "claude-opus-4-0",
                 "name": "claude-opus-4-0",

@@ -641,18 +593,6 @@
     {
         "name": "chatGoogleGenerativeAI",
         "models": [
-            {
-                "label": "gemini-3-pro-preview",
-                "name": "gemini-3-pro-preview",
-                "input_cost": 0.00002,
-                "output_cost": 0.00012
-            },
-            {
-                "label": "gemini-3-pro-image-preview",
-                "name": "gemini-3-pro-image-preview",
-                "input_cost": 0.00002,
-                "output_cost": 0.00012
-            },
             {
                 "label": "gemini-2.5-pro",
                 "name": "gemini-2.5-pro",

@@ -665,12 +605,6 @@
                 "input_cost": 1.25e-6,
                 "output_cost": 0.00001
             },
-            {
-                "label": "gemini-2.5-flash-image",
-                "name": "gemini-2.5-flash-image",
-                "input_cost": 1.25e-6,
-                "output_cost": 0.00001
-            },
             {
                 "label": "gemini-2.5-flash-lite",
                 "name": "gemini-2.5-flash-lite",

@@ -723,12 +657,6 @@
     {
         "name": "chatGoogleVertexAI",
         "models": [
-            {
-                "label": "gemini-3-pro-preview",
-                "name": "gemini-3-pro-preview",
-                "input_cost": 0.00002,
-                "output_cost": 0.00012
-            },
             {
                 "label": "gemini-2.5-pro",
                 "name": "gemini-2.5-pro",

@@ -795,27 +723,6 @@
                 "input_cost": 1.25e-7,
                 "output_cost": 3.75e-7
             },
-            {
-                "label": "claude-opus-4-5@20251101",
-                "name": "claude-opus-4-5@20251101",
-                "description": "Claude 4.5 Opus",
-                "input_cost": 0.000005,
-                "output_cost": 0.000025
-            },
-            {
-                "label": "claude-sonnet-4-5@20250929",
-                "name": "claude-sonnet-4-5@20250929",
-                "description": "Claude 4.5 Sonnet",
-                "input_cost": 0.000003,
-                "output_cost": 0.000015
-            },
-            {
-                "label": "claude-haiku-4-5@20251001",
-                "name": "claude-haiku-4-5@20251001",
-                "description": "Claude 4.5 Haiku",
-                "input_cost": 0.000001,
-                "output_cost": 0.000005
-            },
             {
                 "label": "claude-opus-4-1@20250805",
                 "name": "claude-opus-4-1@20250805",

@@ -1047,12 +954,6 @@
     {
         "name": "chatOpenAI",
        "models": [
-            {
-                "label": "gpt-5.1",
-                "name": "gpt-5.1",
-                "input_cost": 0.00000125,
-                "output_cost": 0.00001
-            },
             {
                 "label": "gpt-5",
                 "name": "gpt-5",

========================================

(File diff suppressed because it is too large.)
========================================

@@ -317,7 +317,7 @@ class Condition_Agentflow implements INode {
         }
     }

-    // If no condition is fulfilled, add isFulfilled to the ELSE condition
+    // If no condition is fullfilled, add isFulfilled to the ELSE condition
     const dummyElseConditionData = {
         type: 'string',
         value1: '',
========================================

@@ -60,7 +60,7 @@ class CustomFunction_Agentflow implements INode {
     constructor() {
         this.label = 'Custom Function'
         this.name = 'customFunctionAgentflow'
-        this.version = 1.1
+        this.version = 1.0
         this.type = 'CustomFunction'
         this.category = 'Agent Flows'
         this.description = 'Execute custom function'
@@ -107,7 +107,8 @@ class CustomFunction_Agentflow implements INode {
         label: 'Key',
         name: 'key',
         type: 'asyncOptions',
-        loadMethod: 'listRuntimeStateKeys'
+        loadMethod: 'listRuntimeStateKeys',
+        freeSolo: true
     },
     {
         label: 'Value',
@@ -133,7 +134,7 @@ class CustomFunction_Agentflow implements INode {

     async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
         const javascriptFunction = nodeData.inputs?.customFunctionJavascriptFunction as string
-        const functionInputVariables = (nodeData.inputs?.customFunctionInputVariables as ICustomFunctionInputVariables[]) ?? []
+        const functionInputVariables = nodeData.inputs?.customFunctionInputVariables as ICustomFunctionInputVariables[]
         const _customFunctionUpdateState = nodeData.inputs?.customFunctionUpdateState

         const state = options.agentflowRuntime?.state as ICommonObject

@@ -146,17 +147,11 @@ class CustomFunction_Agentflow implements INode {

         const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
         const flow = {
-            input,
-            state,
             chatflowId: options.chatflowid,
             sessionId: options.sessionId,
             chatId: options.chatId,
-            rawOutput: options.postProcessing?.rawOutput || '',
-            chatHistory: options.postProcessing?.chatHistory || [],
-            sourceDocuments: options.postProcessing?.sourceDocuments,
-            usedTools: options.postProcessing?.usedTools,
-            artifacts: options.postProcessing?.artifacts,
-            fileAnnotations: options.postProcessing?.fileAnnotations
+            input,
+            state
         }

         // Create additional sandbox variables for custom function inputs
========================================

@@ -30,7 +30,7 @@ class ExecuteFlow_Agentflow implements INode {
     constructor() {
         this.label = 'Execute Flow'
         this.name = 'executeFlowAgentflow'
-        this.version = 1.2
+        this.version = 1.1
         this.type = 'ExecuteFlow'
         this.category = 'Agent Flows'
         this.description = 'Execute another flow'

@@ -102,7 +102,8 @@ class ExecuteFlow_Agentflow implements INode {
         label: 'Key',
         name: 'key',
         type: 'asyncOptions',
-        loadMethod: 'listRuntimeStateKeys'
+        loadMethod: 'listRuntimeStateKeys',
+        freeSolo: true
     },
     {
         label: 'Value',
========================================

@@ -67,8 +67,7 @@ class HTTP_Agentflow implements INode {
     {
         label: 'URL',
         name: 'url',
-        type: 'string',
-        acceptVariable: true
+        type: 'string'
     },
     {
         label: 'Headers',
========================================

@@ -241,11 +241,8 @@ class HumanInput_Agentflow implements INode {
         if (isStreamable) {
             const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
             for await (const chunk of await llmNodeInstance.stream(messages)) {
-                const content = typeof chunk === 'string' ? chunk : chunk.content.toString()
-                sseStreamer.streamTokenEvent(chatId, content)
-
-                const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
-                response = response.concat(messageChunk)
+                sseStreamer.streamTokenEvent(chatId, chunk.content.toString())
+                response = response.concat(chunk)
             }
             humanInputDescription = response.content as string
         } else {
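The `-` lines of this hunk guard against chat models whose `.stream()` yields plain strings instead of `AIMessageChunk` objects: `chunk.content.toString()` throws on a bare string, and `concat` expects a message chunk. A minimal sketch of that normalization, assuming LangChain's message types:

```typescript
import { AIMessageChunk } from '@langchain/core/messages'

// Normalize a streamed chunk that may arrive as a bare string.
function toMessageChunk(chunk: string | AIMessageChunk): AIMessageChunk {
    return typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
}

// Inside the streaming loop:
//   const messageChunk = toMessageChunk(chunk)
//   response = response.concat(messageChunk)
```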
========================================

@@ -2,20 +2,17 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ICommonObject, IMessage, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
 import { AIMessageChunk, BaseMessageLike, MessageContentText } from '@langchain/core/messages'
 import { DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt'
+import { z } from 'zod'
 import { AnalyticHandler } from '../../../src/handler'
-import { ILLMMessage } from '../Interface.Agentflow'
+import { ILLMMessage, IStructuredOutput } from '../Interface.Agentflow'
 import {
-    addImageArtifactsToMessages,
-    extractArtifactsFromResponse,
     getPastChatHistoryImageMessages,
     getUniqueImageMessages,
     processMessagesWithImages,
     replaceBase64ImagesWithFileReferences,
-    replaceInlineDataWithFileReferences,
     updateFlowState
 } from '../utils'
-import { processTemplateVariables, configureStructuredOutput } from '../../../src/utils'
-import { flatten } from 'lodash'
+import { processTemplateVariables } from '../../../src/utils'

 class LLM_Agentflow implements INode {
     label: string
@@ -34,7 +31,7 @@ class LLM_Agentflow implements INode {
     constructor() {
         this.label = 'LLM'
         this.name = 'llmAgentflow'
-        this.version = 1.1
+        this.version = 1.0
         this.type = 'LLM'
         this.category = 'Agent Flows'
         this.description = 'Large language models to analyze user-provided inputs and generate responses'
@@ -290,7 +287,8 @@ class LLM_Agentflow implements INode {
         label: 'Key',
         name: 'key',
         type: 'asyncOptions',
-        loadMethod: 'listRuntimeStateKeys'
+        loadMethod: 'listRuntimeStateKeys',
+        freeSolo: true
     },
     {
         label: 'Value',
@@ -450,16 +448,10 @@ class LLM_Agentflow implements INode {
         }
         delete nodeData.inputs?.llmMessages

-        /**
-         * Add image artifacts from previous assistant responses as user messages
-         * Images are converted from FILE-STORAGE::<image_path> to base 64 image_url format
-         */
-        await addImageArtifactsToMessages(messages, options)
-
         // Configure structured output if specified
         const isStructuredOutput = _llmStructuredOutput && Array.isArray(_llmStructuredOutput) && _llmStructuredOutput.length > 0
         if (isStructuredOutput) {
-            llmNodeInstance = configureStructuredOutput(llmNodeInstance, _llmStructuredOutput)
+            llmNodeInstance = this.configureStructuredOutput(llmNodeInstance, _llmStructuredOutput)
         }

         // Initialize response and determine if streaming is possible
@@ -475,11 +467,9 @@ class LLM_Agentflow implements INode {

         // Track execution time
         const startTime = Date.now()

         const sseStreamer: IServerSideEventStreamer | undefined = options.sseStreamer

-        /*
-         * Invoke LLM
-         */
         if (isStreamable) {
             response = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController)
         } else {
@@ -504,40 +494,6 @@ class LLM_Agentflow implements INode {
         const endTime = Date.now()
         const timeDelta = endTime - startTime

-        // Extract artifacts and file annotations from response metadata
-        let artifacts: any[] = []
-        let fileAnnotations: any[] = []
-        if (response.response_metadata) {
-            const {
-                artifacts: extractedArtifacts,
-                fileAnnotations: extractedFileAnnotations,
-                savedInlineImages
-            } = await extractArtifactsFromResponse(response.response_metadata, newNodeData, options)
-
-            if (extractedArtifacts.length > 0) {
-                artifacts = extractedArtifacts
-
-                // Stream artifacts if this is the last node
-                if (isLastNode && sseStreamer) {
-                    sseStreamer.streamArtifactsEvent(chatId, artifacts)
-                }
-            }
-
-            if (extractedFileAnnotations.length > 0) {
-                fileAnnotations = extractedFileAnnotations
-
-                // Stream file annotations if this is the last node
-                if (isLastNode && sseStreamer) {
-                    sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
-                }
-            }
-
-            // Replace inlineData base64 with file references in the response
-            if (savedInlineImages && savedInlineImages.length > 0) {
-                replaceInlineDataWithFileReferences(response, savedInlineImages)
-            }
-        }
-
         // Update flow state if needed
         let newState = { ...state }
         if (_llmUpdateState && Array.isArray(_llmUpdateState) && _llmUpdateState.length > 0) {
@@ -557,22 +513,10 @@ class LLM_Agentflow implements INode {
             finalResponse = response.content.map((item: any) => item.text).join('\n')
         } else if (response.content && typeof response.content === 'string') {
             finalResponse = response.content
-        } else if (response.content === '') {
-            // Empty response content, this could happen when there is only image data
-            finalResponse = ''
         } else {
             finalResponse = JSON.stringify(response, null, 2)
         }
-        const output = this.prepareOutputObject(
-            response,
-            finalResponse,
-            startTime,
-            endTime,
-            timeDelta,
-            isStructuredOutput,
-            artifacts,
-            fileAnnotations
-        )
+        const output = this.prepareOutputObject(response, finalResponse, startTime, endTime, timeDelta, isStructuredOutput)

         // End analytics tracking
         if (analyticHandlers && llmIds) {
@@ -584,23 +528,12 @@ class LLM_Agentflow implements INode {
             this.sendStreamingEvents(options, chatId, response)
         }

-        // Stream file annotations if any were extracted
-        if (fileAnnotations.length > 0 && isLastNode && sseStreamer) {
-            sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
-        }
-
         // Process template variables in state
         newState = processTemplateVariables(newState, finalResponse)

-        /**
-         * Remove the temporarily added image artifact messages before storing
-         * This is to avoid storing the actual base64 data into database
-         */
-        const messagesToStore = messages.filter((msg: any) => !msg._isTemporaryImageMessage)
-
         // Replace the actual messages array with one that includes the file references for images instead of base64 data
         const messagesWithFileReferences = replaceBase64ImagesWithFileReferences(
-            messagesToStore,
+            messages,
             runtimeImageMessagesWithFileRef,
             pastImageMessagesWithFileRef
         )

@@ -651,13 +584,7 @@ class LLM_Agentflow implements INode {
         {
             role: returnRole,
             content: finalResponse,
-            name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id,
-            ...(((artifacts && artifacts.length > 0) || (fileAnnotations && fileAnnotations.length > 0)) && {
-                additional_kwargs: {
-                    ...(artifacts && artifacts.length > 0 && { artifacts }),
-                    ...(fileAnnotations && fileAnnotations.length > 0 && { fileAnnotations })
-                }
-            })
+            name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id
         }
     ]
 }
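The `additional_kwargs` block removed in the second hunk uses TypeScript's conditional-spread idiom: spreading `false` or `undefined` into an object literal is a no-op, so `...(cond && { key })` attaches the key only when `cond` is truthy. A small self-contained illustration (the names are illustrative):

```typescript
// Conditional object spread: the property exists only when the guard
// expression is truthy; spreading `false` adds nothing to the object.
function buildMessage(content: string, artifacts: unknown[], fileAnnotations: unknown[]) {
    return {
        role: 'assistant',
        content,
        ...((artifacts.length > 0 || fileAnnotations.length > 0) && {
            additional_kwargs: {
                ...(artifacts.length > 0 && { artifacts }),
                ...(fileAnnotations.length > 0 && { fileAnnotations })
            }
        })
    }
}

// buildMessage('hi', [], []) -> { role: 'assistant', content: 'hi' }
// buildMessage('hi', [{ type: 'png' }], []) -> includes additional_kwargs.artifacts
```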
@@ -827,6 +754,59 @@ class LLM_Agentflow implements INode {
         }
     }

+    /**
+     * Configures structured output for the LLM
+     */
+    private configureStructuredOutput(llmNodeInstance: BaseChatModel, llmStructuredOutput: IStructuredOutput[]): BaseChatModel {
+        try {
+            const zodObj: ICommonObject = {}
+            for (const sch of llmStructuredOutput) {
+                if (sch.type === 'string') {
+                    zodObj[sch.key] = z.string().describe(sch.description || '')
+                } else if (sch.type === 'stringArray') {
+                    zodObj[sch.key] = z.array(z.string()).describe(sch.description || '')
+                } else if (sch.type === 'number') {
+                    zodObj[sch.key] = z.number().describe(sch.description || '')
+                } else if (sch.type === 'boolean') {
+                    zodObj[sch.key] = z.boolean().describe(sch.description || '')
+                } else if (sch.type === 'enum') {
+                    const enumValues = sch.enumValues?.split(',').map((item: string) => item.trim()) || []
+                    zodObj[sch.key] = z
+                        .enum(enumValues.length ? (enumValues as [string, ...string[]]) : ['default'])
+                        .describe(sch.description || '')
+                } else if (sch.type === 'jsonArray') {
+                    const jsonSchema = sch.jsonSchema
+                    if (jsonSchema) {
+                        try {
+                            // Parse the JSON schema
+                            const schemaObj = JSON.parse(jsonSchema)
+
+                            // Create a Zod schema from the JSON schema
+                            const itemSchema = this.createZodSchemaFromJSON(schemaObj)
+
+                            // Create an array schema of the item schema
+                            zodObj[sch.key] = z.array(itemSchema).describe(sch.description || '')
+                        } catch (err) {
+                            console.error(`Error parsing JSON schema for ${sch.key}:`, err)
+                            // Fallback to generic array of records
+                            zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '')
+                        }
+                    } else {
+                        // If no schema provided, use generic array of records
+                        zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '')
+                    }
+                }
+            }
+            const structuredOutput = z.object(zodObj)
+
+            // @ts-ignore
+            return llmNodeInstance.withStructuredOutput(structuredOutput)
+        } catch (exception) {
+            console.error(exception)
+            return llmNodeInstance
+        }
+    }
+
     /**
      * Handles streaming response from the LLM
      */
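For context, a brief sketch of how a Zod schema built this way is typically consumed. `withStructuredOutput` is LangChain's `BaseChatModel` API; the model class and field spec below are illustrative assumptions, not code from this diff:

```typescript
import { z } from 'zod'
import { ChatOpenAI } from '@langchain/openai'

// Hypothetical schema, mirroring what configureStructuredOutput builds
// from an IStructuredOutput entry of type 'enum'.
const schema = z.object({
    sentiment: z.enum(['positive', 'neutral', 'negative']).describe('Overall sentiment')
})

const model = new ChatOpenAI({ model: 'gpt-4o-mini' })

// The wrapped model parses and validates its reply against the schema,
// returning a plain object instead of a raw AIMessage.
const structured = model.withStructuredOutput(schema)

// const result = await structured.invoke('I love this product!')
// result -> { sentiment: 'positive' }
```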
@@ -843,20 +823,16 @@ class LLM_Agentflow implements INode {
         for await (const chunk of await llmNodeInstance.stream(messages, { signal: abortController?.signal })) {
             if (sseStreamer) {
                 let content = ''
-
-                if (typeof chunk === 'string') {
-                    content = chunk
-                } else if (Array.isArray(chunk.content) && chunk.content.length > 0) {
+                if (Array.isArray(chunk.content) && chunk.content.length > 0) {
                     const contents = chunk.content as MessageContentText[]
                     content = contents.map((item) => item.text).join('')
-                } else if (chunk.content) {
+                } else {
                     content = chunk.content.toString()
                 }
                 sseStreamer.streamTokenEvent(chatId, content)
             }

-            const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
-            response = response.concat(messageChunk)
+            response = response.concat(chunk)
         }
     } catch (error) {
         console.error('Error during streaming:', error)
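In the fuller `-` side of this loop, chunk content may be an array of typed parts (as some multimodal providers emit) rather than a plain string, and the text parts are joined before streaming. A compact sketch of that extraction, assuming LangChain's `MessageContentText` shape:

```typescript
import { AIMessageChunk, MessageContentText } from '@langchain/core/messages'

// Pull displayable text out of a chunk whose content may be a string,
// an array of content parts, or empty.
function extractText(chunk: string | AIMessageChunk): string {
    if (typeof chunk === 'string') return chunk
    if (Array.isArray(chunk.content) && chunk.content.length > 0) {
        const parts = chunk.content as MessageContentText[]
        return parts.map((p) => p.text).join('')
    }
    return chunk.content ? chunk.content.toString() : ''
}
```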
@@ -878,9 +854,7 @@ class LLM_Agentflow implements INode {
         startTime: number,
         endTime: number,
         timeDelta: number,
-        isStructuredOutput: boolean,
-        artifacts: any[] = [],
-        fileAnnotations: any[] = []
+        isStructuredOutput: boolean
     ): any {
         const output: any = {
             content: finalResponse,

@@ -899,10 +873,6 @@ class LLM_Agentflow implements INode {
             output.usageMetadata = response.usage_metadata
         }

-        if (response.response_metadata) {
-            output.responseMetadata = response.response_metadata
-        }
-
         if (isStructuredOutput && typeof response === 'object') {
             const structuredOutput = response as Record<string, any>
             for (const key in structuredOutput) {

@@ -912,14 +882,6 @@ class LLM_Agentflow implements INode {
             }
         }

-        if (artifacts && artifacts.length > 0) {
-            output.artifacts = flatten(artifacts)
-        }
-
-        if (fileAnnotations && fileAnnotations.length > 0) {
-            output.fileAnnotations = fileAnnotations
-        }
-
         return output
     }
@ -930,12 +892,7 @@ class LLM_Agentflow implements INode {
|
||||||
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
|
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
|
||||||
|
|
||||||
if (response.tool_calls) {
|
if (response.tool_calls) {
|
||||||
const formattedToolCalls = response.tool_calls.map((toolCall: any) => ({
|
sseStreamer.streamCalledToolsEvent(chatId, response.tool_calls)
|
||||||
tool: toolCall.name || 'tool',
|
|
||||||
toolInput: toolCall.args,
|
|
||||||
toolOutput: ''
|
|
||||||
}))
|
|
||||||
sseStreamer.streamCalledToolsEvent(chatId, flatten(formattedToolCalls))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (response.usage_metadata) {
|
if (response.usage_metadata) {
|
||||||
|
|
@ -944,6 +901,107 @@ class LLM_Agentflow implements INode {
|
||||||
|
|
||||||
sseStreamer.streamEndEvent(chatId)
|
sseStreamer.streamEndEvent(chatId)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a Zod schema from a JSON schema object
|
||||||
|
* @param jsonSchema The JSON schema object
|
||||||
|
* @returns A Zod schema
|
||||||
|
*/
|
||||||
|
private createZodSchemaFromJSON(jsonSchema: any): z.ZodTypeAny {
|
||||||
|
// If the schema is an object with properties, create an object schema
|
||||||
|
if (typeof jsonSchema === 'object' && jsonSchema !== null) {
|
||||||
|
const schemaObj: Record<string, z.ZodTypeAny> = {}
|
||||||
|
|
||||||
|
// Process each property in the schema
|
||||||
|
for (const [key, value] of Object.entries(jsonSchema)) {
|
||||||
|
if (value === null) {
|
||||||
|
// Handle null values
|
||||||
|
schemaObj[key] = z.null()
|
||||||
|
} else if (typeof value === 'object' && !Array.isArray(value)) {
|
||||||
|
// Check if the property has a type definition
|
||||||
|
if ('type' in value) {
|
||||||
|
const type = value.type as string
|
||||||
|
const description = ('description' in value ? (value.description as string) : '') || ''
|
||||||
|
|
||||||
|
// Create the appropriate Zod type based on the type property
|
||||||
|
if (type === 'string') {
|
||||||
|
schemaObj[key] = z.string().describe(description)
|
||||||
|
} else if (type === 'number') {
|
||||||
|
schemaObj[key] = z.number().describe(description)
|
||||||
|
} else if (type === 'boolean') {
|
||||||
|
schemaObj[key] = z.boolean().describe(description)
|
||||||
|
} else if (type === 'array') {
|
||||||
|
// If it's an array type, check if items is defined
|
||||||
|
if ('items' in value && value.items) {
|
||||||
|
const itemSchema = this.createZodSchemaFromJSON(value.items)
|
||||||
|
schemaObj[key] = z.array(itemSchema).describe(description)
|
||||||
|
} else {
|
||||||
|
// Default to array of any if items not specified
|
||||||
|
schemaObj[key] = z.array(z.any()).describe(description)
|
||||||
|
}
|
||||||
|
} else if (type === 'object') {
|
||||||
|
// If it's an object type, check if properties is defined
|
||||||
|
if ('properties' in value && value.properties) {
|
||||||
|
const nestedSchema = this.createZodSchemaFromJSON(value.properties)
|
||||||
|
schemaObj[key] = nestedSchema.describe(description)
|
||||||
|
} else {
|
||||||
|
// Default to record of any if properties not specified
|
||||||
|
schemaObj[key] = z.record(z.any()).describe(description)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Default to any for unknown types
|
||||||
|
schemaObj[key] = z.any().describe(description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the property is optional
|
||||||
|
if ('optional' in value && value.optional === true) {
|
||||||
|
schemaObj[key] = schemaObj[key].optional()
|
||||||
|
}
|
||||||
|
} else if (Array.isArray(value)) {
|
||||||
|
// Array values without a type property
|
||||||
|
if (value.length > 0) {
|
||||||
|
// If the array has items, recursively create a schema for the first item
|
||||||
|
const itemSchema = this.createZodSchemaFromJSON(value[0])
|
||||||
|
schemaObj[key] = z.array(itemSchema)
|
||||||
|
} else {
|
||||||
|
// Empty array, allow any array
|
||||||
|
schemaObj[key] = z.array(z.any())
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// It's a nested object without a type property, recursively create schema
|
||||||
|
schemaObj[key] = this.createZodSchemaFromJSON(value)
|
||||||
|
}
|
||||||
|
} else if (Array.isArray(value)) {
|
||||||
|
// Array values
|
||||||
|
if (value.length > 0) {
|
||||||
|
// If the array has items, recursively create a schema for the first item
|
||||||
|
const itemSchema = this.createZodSchemaFromJSON(value[0])
|
||||||
|
schemaObj[key] = z.array(itemSchema)
|
||||||
|
} else {
|
||||||
|
// Empty array, allow any array
|
||||||
|
schemaObj[key] = z.array(z.any())
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// For primitive values (which shouldn't be in the schema directly)
|
||||||
|
// Use the corresponding Zod type
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
schemaObj[key] = z.string()
|
||||||
|
} else if (typeof value === 'number') {
|
||||||
|
schemaObj[key] = z.number()
|
||||||
|
} else if (typeof value === 'boolean') {
|
||||||
|
schemaObj[key] = z.boolean()
|
||||||
|
} else {
|
||||||
|
schemaObj[key] = z.any()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return z.object(schemaObj)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback to any for unknown types
|
||||||
|
return z.any()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = { nodeClass: LLM_Agentflow }
|
module.exports = { nodeClass: LLM_Agentflow }
|
||||||
|
|
|
||||||
|
|
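For context, a minimal sketch of what `createZodSchemaFromJSON` produces for a simple input, assuming the real `zod` package; the input shape and field names here are illustrative, not taken from the repository:

    import { z } from 'zod'

    // Illustrative input: { answer: { type: 'string', description: 'final answer' },
    //                       tags: { type: 'array', items: { type: 'string' } } }
    // The method above would build the equivalent of:
    const structuredOutput = z.object({
        answer: z.string().describe('final answer'),
        tags: z.array(z.string()).describe('related tags')
    })

    // Parsed results are then type-safe:
    type Output = z.infer<typeof structuredOutput>
    const parsed: Output = structuredOutput.parse({ answer: 'ok', tags: ['a', 'b'] })
    console.log(parsed.tags.length) // 2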
@@ -1,5 +1,4 @@
 import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
-import { updateFlowState } from '../utils'

 class Loop_Agentflow implements INode {
     label: string

@@ -20,7 +19,7 @@ class Loop_Agentflow implements INode {
    constructor() {
        this.label = 'Loop'
        this.name = 'loopAgentflow'
-        this.version = 1.2
+        this.version = 1.0
        this.type = 'Loop'
        this.category = 'Agent Flows'
        this.description = 'Loop back to a previous node'

@@ -41,39 +40,6 @@ class Loop_Agentflow implements INode {
                name: 'maxLoopCount',
                type: 'number',
                default: 5
-            },
-            {
-                label: 'Fallback Message',
-                name: 'fallbackMessage',
-                type: 'string',
-                description: 'Message to display if the loop count is exceeded',
-                placeholder: 'Enter your fallback message here',
-                rows: 4,
-                acceptVariable: true,
-                optional: true
-            },
-            {
-                label: 'Update Flow State',
-                name: 'loopUpdateState',
-                description: 'Update runtime state during the execution of the workflow',
-                type: 'array',
-                optional: true,
-                acceptVariable: true,
-                array: [
-                    {
-                        label: 'Key',
-                        name: 'key',
-                        type: 'asyncOptions',
-                        loadMethod: 'listRuntimeStateKeys'
-                    },
-                    {
-                        label: 'Value',
-                        name: 'value',
-                        type: 'string',
-                        acceptVariable: true,
-                        acceptNodeOutputAsVariable: true
-                    }
-                ]
            }
        ]
    }

@@ -92,20 +58,12 @@ class Loop_Agentflow implements INode {
                })
            }
            return returnOptions
-        },
-        async listRuntimeStateKeys(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
-            const previousNodes = options.previousNodes as ICommonObject[]
-            const startAgentflowNode = previousNodes.find((node) => node.name === 'startAgentflow')
-            const state = startAgentflowNode?.inputs?.startState as ICommonObject[]
-            return state.map((item) => ({ label: item.key, name: item.key }))
        }
    }

    async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const loopBackToNode = nodeData.inputs?.loopBackToNode as string
        const _maxLoopCount = nodeData.inputs?.maxLoopCount as string
-        const fallbackMessage = nodeData.inputs?.fallbackMessage as string
-        const _loopUpdateState = nodeData.inputs?.loopUpdateState

        const state = options.agentflowRuntime?.state as ICommonObject

@@ -117,34 +75,16 @@ class Loop_Agentflow implements INode {
            maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5
        }

-        const finalOutput = 'Loop back to ' + `${loopBackToNodeLabel} (${loopBackToNodeId})`
-
-        // Update flow state if needed
-        let newState = { ...state }
-        if (_loopUpdateState && Array.isArray(_loopUpdateState) && _loopUpdateState.length > 0) {
-            newState = updateFlowState(state, _loopUpdateState)
-        }
-
-        // Process template variables in state
-        if (newState && Object.keys(newState).length > 0) {
-            for (const key in newState) {
-                if (newState[key].toString().includes('{{ output }}')) {
-                    newState[key] = finalOutput
-                }
-            }
-        }
-
        const returnOutput = {
            id: nodeData.id,
            name: this.name,
            input: data,
            output: {
-                content: finalOutput,
+                content: 'Loop back to ' + `${loopBackToNodeLabel} (${loopBackToNodeId})`,
                nodeID: loopBackToNodeId,
-                maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5,
-                fallbackMessage
+                maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5
            },
-            state: newState
+            state
        }

        return returnOutput
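For context on the removed state handling: `updateFlowState` merges the configured key/value pairs into the runtime state, and any value containing the `{{ output }}` placeholder is then replaced with the node's final output. A minimal sketch of that behaviour under those assumptions (names illustrative, not the node's exact code):

    type FlowState = Record<string, any>

    const applyLoopState = (state: FlowState, updates: Array<{ key: string; value: string }>, output: string): FlowState => {
        const newState = { ...state }
        for (const { key, value } of updates) newState[key] = value
        // Substitute the template placeholder with the final output
        for (const key in newState) {
            if (String(newState[key]).includes('{{ output }}')) newState[key] = output
        }
        return newState
    }

    // applyLoopState({ n: '1' }, [{ key: 'last', value: '{{ output }}' }], 'Loop back to Agent (agent_0)')
    // -> { n: '1', last: 'Loop back to Agent (agent_0)' }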
@@ -36,7 +36,7 @@ class Retriever_Agentflow implements INode {
    constructor() {
        this.label = 'Retriever'
        this.name = 'retrieverAgentflow'
-        this.version = 1.1
+        this.version = 1.0
        this.type = 'Retriever'
        this.category = 'Agent Flows'
        this.description = 'Retrieve information from vector database'

@@ -87,7 +87,8 @@ class Retriever_Agentflow implements INode {
                        label: 'Key',
                        name: 'key',
                        type: 'asyncOptions',
-                        loadMethod: 'listRuntimeStateKeys'
+                        loadMethod: 'listRuntimeStateKeys',
+                        freeSolo: true
                    },
                    {
                        label: 'Value',
@@ -29,7 +29,7 @@ class Tool_Agentflow implements INode {
    constructor() {
        this.label = 'Tool'
        this.name = 'toolAgentflow'
-        this.version = 1.2
+        this.version = 1.1
        this.type = 'Tool'
        this.category = 'Agent Flows'
        this.description = 'Tools allow LLM to interact with external systems'

@@ -80,7 +80,8 @@ class Tool_Agentflow implements INode {
                        label: 'Key',
                        name: 'key',
                        type: 'asyncOptions',
-                        loadMethod: 'listRuntimeStateKeys'
+                        loadMethod: 'listRuntimeStateKeys',
+                        freeSolo: true
                    },
                    {
                        label: 'Value',
@@ -1,11 +1,10 @@
-import { BaseMessage, MessageContentImageUrl, AIMessageChunk } from '@langchain/core/messages'
+import { BaseMessage, MessageContentImageUrl } from '@langchain/core/messages'
 import { getImageUploads } from '../../src/multiModalUtils'
-import { addSingleFileToStorage, getFileFromStorage } from '../../src/storageUtils'
-import { ICommonObject, IFileUpload, INodeData } from '../../src/Interface'
+import { getFileFromStorage } from '../../src/storageUtils'
+import { ICommonObject, IFileUpload } from '../../src/Interface'
 import { BaseMessageLike } from '@langchain/core/messages'
 import { IFlowState } from './Interface.Agentflow'
-import { getCredentialData, getCredentialParam, handleEscapeCharacters, mapMimeTypeToInputField } from '../../src/utils'
-import fetch from 'node-fetch'
+import { handleEscapeCharacters, mapMimeTypeToInputField } from '../../src/utils'

 export const addImagesToMessages = async (
     options: ICommonObject,

@@ -19,8 +18,7 @@ export const addImagesToMessages = async (
        for (const upload of imageUploads) {
            let bf = upload.data
            if (upload.type == 'stored-file') {
-                const fileName = upload.name.replace(/^FILE-STORAGE::/, '')
-                const contents = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)
+                const contents = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId)
                // as the image is stored in the server, read the file and convert it to base64
                bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64')

@@ -91,9 +89,8 @@ export const processMessagesWithImages = async (
                if (item.type === 'stored-file' && item.name && item.mime.startsWith('image/')) {
                    hasImageReferences = true
                    try {
-                        const fileName = item.name.replace(/^FILE-STORAGE::/, '')
                        // Get file contents from storage
-                        const contents = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)
+                        const contents = await getFileFromStorage(item.name, options.orgId, options.chatflowid, options.chatId)

                        // Create base64 data URL
                        const base64Data = 'data:' + item.mime + ';base64,' + contents.toString('base64')

@@ -325,8 +322,7 @@ export const getPastChatHistoryImageMessages = async (
            const imageContents: MessageContentImageUrl[] = []
            for (const upload of uploads) {
                if (upload.type === 'stored-file' && upload.mime.startsWith('image/')) {
-                    const fileName = upload.name.replace(/^FILE-STORAGE::/, '')
-                    const fileData = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)
+                    const fileData = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId)
                    // as the image is stored in the server, read the file and convert it to base64
                    const bf = 'data:' + upload.mime + ';base64,' + fileData.toString('base64')

@@ -460,437 +456,6 @@ export const getPastChatHistoryImageMessages = async (
     }
 }
-
-/**
- * Gets MIME type from filename extension
- */
-export const getMimeTypeFromFilename = (filename: string): string => {
-    const extension = filename.toLowerCase().split('.').pop()
-    const mimeTypes: { [key: string]: string } = {
-        png: 'image/png',
-        jpg: 'image/jpeg',
-        jpeg: 'image/jpeg',
-        gif: 'image/gif',
-        pdf: 'application/pdf',
-        txt: 'text/plain',
-        csv: 'text/csv',
-        json: 'application/json',
-        html: 'text/html',
-        xml: 'application/xml'
-    }
-    return mimeTypes[extension || ''] || 'application/octet-stream'
-}
-
-/**
- * Gets artifact type from filename extension for UI rendering
- */
-export const getArtifactTypeFromFilename = (filename: string): string => {
-    const extension = filename.toLowerCase().split('.').pop()
-    const artifactTypes: { [key: string]: string } = {
-        png: 'png',
-        jpg: 'jpeg',
-        jpeg: 'jpeg',
-        html: 'html',
-        htm: 'html',
-        md: 'markdown',
-        markdown: 'markdown',
-        json: 'json',
-        js: 'javascript',
-        javascript: 'javascript',
-        tex: 'latex',
-        latex: 'latex',
-        txt: 'text',
-        csv: 'text',
-        pdf: 'text'
-    }
-    return artifactTypes[extension || ''] || 'text'
-}
-
-/**
- * Saves base64 image data to storage and returns file information
- */
-export const saveBase64Image = async (
-    outputItem: any,
-    options: ICommonObject
-): Promise<{ filePath: string; fileName: string; totalSize: number } | null> => {
-    try {
-        if (!outputItem.result) {
-            return null
-        }
-
-        // Extract base64 data and create buffer
-        const base64Data = outputItem.result
-        const imageBuffer = Buffer.from(base64Data, 'base64')
-
-        // Determine file extension and MIME type
-        const outputFormat = outputItem.output_format || 'png'
-        const fileName = `generated_image_${outputItem.id || Date.now()}.${outputFormat}`
-        const mimeType = outputFormat === 'png' ? 'image/png' : 'image/jpeg'
-
-        // Save the image using the existing storage utility
-        const { path, totalSize } = await addSingleFileToStorage(
-            mimeType,
-            imageBuffer,
-            fileName,
-            options.orgId,
-            options.chatflowid,
-            options.chatId
-        )
-
-        return { filePath: path, fileName, totalSize }
-    } catch (error) {
-        console.error('Error saving base64 image:', error)
-        return null
-    }
-}
-
-/**
- * Saves Gemini inline image data to storage and returns file information
- */
-export const saveGeminiInlineImage = async (
-    inlineItem: any,
-    options: ICommonObject
-): Promise<{ filePath: string; fileName: string; totalSize: number } | null> => {
-    try {
-        if (!inlineItem.data || !inlineItem.mimeType) {
-            return null
-        }
-
-        // Extract base64 data and create buffer
-        const base64Data = inlineItem.data
-        const imageBuffer = Buffer.from(base64Data, 'base64')
-
-        // Determine file extension from MIME type
-        const mimeType = inlineItem.mimeType
-        let extension = 'png'
-        if (mimeType.includes('jpeg') || mimeType.includes('jpg')) {
-            extension = 'jpg'
-        } else if (mimeType.includes('png')) {
-            extension = 'png'
-        } else if (mimeType.includes('gif')) {
-            extension = 'gif'
-        } else if (mimeType.includes('webp')) {
-            extension = 'webp'
-        }
-
-        const fileName = `gemini_generated_image_${Date.now()}.${extension}`
-
-        // Save the image using the existing storage utility
-        const { path, totalSize } = await addSingleFileToStorage(
-            mimeType,
-            imageBuffer,
-            fileName,
-            options.orgId,
-            options.chatflowid,
-            options.chatId
-        )
-
-        return { filePath: path, fileName, totalSize }
-    } catch (error) {
-        console.error('Error saving Gemini inline image:', error)
-        return null
-    }
-}
-
-/**
- * Downloads file content from container file citation
- */
-export const downloadContainerFile = async (
-    containerId: string,
-    fileId: string,
-    filename: string,
-    modelNodeData: INodeData,
-    options: ICommonObject
-): Promise<{ filePath: string; totalSize: number } | null> => {
-    try {
-        const credentialData = await getCredentialData(modelNodeData.credential ?? '', options)
-        const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, modelNodeData)
-
-        if (!openAIApiKey) {
-            console.warn('No OpenAI API key available for downloading container file')
-            return null
-        }
-
-        // Download the file using OpenAI Container API
-        const response = await fetch(`https://api.openai.com/v1/containers/${containerId}/files/${fileId}/content`, {
-            method: 'GET',
-            headers: {
-                Accept: '*/*',
-                Authorization: `Bearer ${openAIApiKey}`
-            }
-        })
-
-        if (!response.ok) {
-            console.warn(
-                `Failed to download container file ${fileId} from container ${containerId}: ${response.status} ${response.statusText}`
-            )
-            return null
-        }
-
-        // Extract the binary data from the Response object
-        const data = await response.arrayBuffer()
-        const dataBuffer = Buffer.from(data)
-        const mimeType = getMimeTypeFromFilename(filename)
-
-        // Store the file using the same storage utility as OpenAIAssistant
-        const { path, totalSize } = await addSingleFileToStorage(
-            mimeType,
-            dataBuffer,
-            filename,
-            options.orgId,
-            options.chatflowid,
-            options.chatId
-        )
-
-        return { filePath: path, totalSize }
-    } catch (error) {
-        console.error('Error downloading container file:', error)
-        return null
-    }
-}
-
-/**
- * Replace inlineData base64 with file references in the response content
- */
-export const replaceInlineDataWithFileReferences = (
-    response: AIMessageChunk,
-    savedInlineImages: Array<{ filePath: string; fileName: string; mimeType: string }>
-): void => {
-    // Check if content is an array
-    if (!Array.isArray(response.content)) {
-        return
-    }
-
-    // Replace base64 data with file references in response content
-    let savedImageIndex = 0
-    for (let i = 0; i < response.content.length; i++) {
-        const contentItem = response.content[i]
-        if (
-            typeof contentItem === 'object' &&
-            contentItem.type === 'inlineData' &&
-            contentItem.inlineData &&
-            savedImageIndex < savedInlineImages.length
-        ) {
-            const savedImage = savedInlineImages[savedImageIndex]
-            // Replace with file reference
-            response.content[i] = {
-                type: 'stored-file',
-                name: savedImage.fileName,
-                mime: savedImage.mimeType,
-                path: savedImage.filePath
-            }
-            savedImageIndex++
-        }
-    }
-
-    // Clear the inlineData from response_metadata to avoid duplication
-    if (response.response_metadata?.inlineData) {
-        delete response.response_metadata.inlineData
-    }
-}
-
-/**
- * Extracts artifacts from response metadata (both annotations and built-in tools)
- */
-export const extractArtifactsFromResponse = async (
-    responseMetadata: any,
-    modelNodeData: INodeData,
-    options: ICommonObject
-): Promise<{
-    artifacts: any[]
-    fileAnnotations: any[]
-    savedInlineImages?: Array<{ filePath: string; fileName: string; mimeType: string }>
-}> => {
-    const artifacts: any[] = []
-    const fileAnnotations: any[] = []
-    const savedInlineImages: Array<{ filePath: string; fileName: string; mimeType: string }> = []
-
-    // Handle Gemini inline data (image generation)
-    if (responseMetadata?.inlineData && Array.isArray(responseMetadata.inlineData)) {
-        for (const inlineItem of responseMetadata.inlineData) {
-            if (inlineItem.type === 'gemini_inline_data' && inlineItem.data && inlineItem.mimeType) {
-                try {
-                    const savedImageResult = await saveGeminiInlineImage(inlineItem, options)
-                    if (savedImageResult) {
-                        // Create artifact in the same format as other image artifacts
-                        const fileType = getArtifactTypeFromFilename(savedImageResult.fileName)
-                        artifacts.push({
-                            type: fileType,
-                            data: savedImageResult.filePath
-                        })
-
-                        // Track saved image for replacing base64 data in content
-                        savedInlineImages.push({
-                            filePath: savedImageResult.filePath,
-                            fileName: savedImageResult.fileName,
-                            mimeType: inlineItem.mimeType
-                        })
-                    }
-                } catch (error) {
-                    console.error('Error processing Gemini inline image artifact:', error)
-                }
-            }
-        }
-    }
-
-    if (!responseMetadata?.output || !Array.isArray(responseMetadata.output)) {
-        return { artifacts, fileAnnotations, savedInlineImages: savedInlineImages.length > 0 ? savedInlineImages : undefined }
-    }
-
-    for (const outputItem of responseMetadata.output) {
-        // Handle container file citations from annotations
-        if (outputItem.type === 'message' && outputItem.content && Array.isArray(outputItem.content)) {
-            for (const contentItem of outputItem.content) {
-                if (contentItem.annotations && Array.isArray(contentItem.annotations)) {
-                    for (const annotation of contentItem.annotations) {
-                        if (annotation.type === 'container_file_citation' && annotation.file_id && annotation.filename) {
-                            try {
-                                // Download and store the file content
-                                const downloadResult = await downloadContainerFile(
-                                    annotation.container_id,
-                                    annotation.file_id,
-                                    annotation.filename,
-                                    modelNodeData,
-                                    options
-                                )
-
-                                if (downloadResult) {
-                                    const fileType = getArtifactTypeFromFilename(annotation.filename)
-
-                                    if (fileType === 'png' || fileType === 'jpeg' || fileType === 'jpg') {
-                                        const artifact = {
-                                            type: fileType,
-                                            data: downloadResult.filePath
-                                        }
-
-                                        artifacts.push(artifact)
-                                    } else {
-                                        fileAnnotations.push({
-                                            filePath: downloadResult.filePath,
-                                            fileName: annotation.filename
-                                        })
-                                    }
-                                }
-                            } catch (error) {
-                                console.error('Error processing annotation:', error)
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        // Handle built-in tool artifacts (like image generation)
-        if (outputItem.type === 'image_generation_call' && outputItem.result) {
-            try {
-                const savedImageResult = await saveBase64Image(outputItem, options)
-                if (savedImageResult) {
-                    // Replace the base64 result with the file path in the response metadata
-                    outputItem.result = savedImageResult.filePath
-
-                    // Create artifact in the same format as other image artifacts
-                    const fileType = getArtifactTypeFromFilename(savedImageResult.fileName)
-                    artifacts.push({
-                        type: fileType,
-                        data: savedImageResult.filePath
-                    })
-                }
-            } catch (error) {
-                console.error('Error processing image generation artifact:', error)
-            }
-        }
-    }
-
-    return { artifacts, fileAnnotations, savedInlineImages: savedInlineImages.length > 0 ? savedInlineImages : undefined }
-}
-
-/**
- * Add image artifacts from previous assistant messages as user messages
- * This allows the LLM to see and reference the generated images in the conversation
- * Messages are marked with a special flag for later removal
- */
-export const addImageArtifactsToMessages = async (messages: BaseMessageLike[], options: ICommonObject): Promise<void> => {
-    const imageExtensions = ['png', 'jpg', 'jpeg', 'gif', 'webp']
-    const messagesToInsert: Array<{ index: number; message: any }> = []
-
-    // Iterate through messages to find assistant messages with image artifacts
-    for (let i = 0; i < messages.length; i++) {
-        const message = messages[i] as any
-
-        // Check if this is an assistant message with artifacts
-        if (
-            (message.role === 'assistant' || message.role === 'ai') &&
-            message.additional_kwargs?.artifacts &&
-            Array.isArray(message.additional_kwargs.artifacts)
-        ) {
-            const artifacts = message.additional_kwargs.artifacts
-            const imageArtifacts: Array<{ type: string; name: string; mime: string }> = []
-
-            // Extract image artifacts
-            for (const artifact of artifacts) {
-                if (artifact.type && artifact.data) {
-                    // Check if this is an image artifact by file type
-                    if (imageExtensions.includes(artifact.type.toLowerCase())) {
-                        // Extract filename from the file path
-                        const fileName = artifact.data.split('/').pop() || artifact.data
-                        const mimeType = `image/${artifact.type.toLowerCase()}`
-
-                        imageArtifacts.push({
-                            type: 'stored-file',
-                            name: fileName,
-                            mime: mimeType
-                        })
-                    }
-                }
-            }
-
-            // If we found image artifacts, prepare to insert a user message after this assistant message
-            if (imageArtifacts.length > 0) {
-                // Check if the next message already contains these image artifacts to avoid duplicates
-                const nextMessage = messages[i + 1] as any
-                const shouldInsert =
-                    !nextMessage ||
-                    nextMessage.role !== 'user' ||
-                    !Array.isArray(nextMessage.content) ||
-                    !nextMessage.content.some(
-                        (item: any) =>
-                            (item.type === 'stored-file' || item.type === 'image_url') &&
-                            imageArtifacts.some((artifact) => {
-                                // Compare with and without FILE-STORAGE:: prefix
-                                const artifactName = artifact.name.replace('FILE-STORAGE::', '')
-                                const itemName = item.name?.replace('FILE-STORAGE::', '') || ''
-                                return artifactName === itemName
-                            })
-                    )
-
-                if (shouldInsert) {
-                    messagesToInsert.push({
-                        index: i + 1,
-                        message: {
-                            role: 'user',
-                            content: imageArtifacts,
-                            _isTemporaryImageMessage: true // Mark for later removal
-                        }
-                    })
-                }
-            }
-        }
-    }
-
-    // Insert messages in reverse order to maintain correct indices
-    for (let i = messagesToInsert.length - 1; i >= 0; i--) {
-        const { index, message } = messagesToInsert[i]
-        messages.splice(index, 0, message)
-    }
-
-    // Convert stored-file references to base64 image_url format
-    if (messagesToInsert.length > 0) {
-        const { updatedMessages } = await processMessagesWithImages(messages, options)
-        // Replace the messages array content with the updated messages
-        messages.length = 0
-        messages.push(...updatedMessages)
-    }
-}

 /**
  * Updates the flow state with new values
  */
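Worth noting about the two removed lookup helpers: both are total functions with a safe fallback, so unknown extensions never throw. A usage sketch based on the tables above (the import path is hypothetical):

    import { getMimeTypeFromFilename, getArtifactTypeFromFilename } from './utils'

    console.log(getMimeTypeFromFilename('report.PDF'))     // 'application/pdf' (extension is lowercased first)
    console.log(getMimeTypeFromFilename('archive.zip'))    // 'application/octet-stream' (fallback)
    console.log(getArtifactTypeFromFilename('chart.png'))  // 'png'
    console.log(getArtifactTypeFromFilename('notes.rst'))  // 'text' (fallback)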
@@ -183,7 +183,7 @@ json.dumps(my_dict)`
            // TODO: get print console output
            finalResult = await pyodide.runPythonAsync(code)
        } catch (error) {
-            throw new Error(`Sorry, I'm unable to find answer for question: "${input}" using following code: "${pythonCode}"`)
+            throw new Error(`Sorry, I'm unable to find answer for question: "${input}" using follwoing code: "${pythonCode}"`)
        }
    }
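For context, `runPythonAsync` evaluates Python source in the Pyodide runtime and resolves with the value of the last expression. A minimal standalone sketch, assuming the `pyodide` npm package rather than this node's surrounding setup:

    import { loadPyodide } from 'pyodide'

    const runPython = async (code: string) => {
        const pyodide = await loadPyodide()
        // Resolves with the value of the last expression, converted to JS where possible
        return await pyodide.runPythonAsync(code)
    }

    // runPython('1 + 1').then(console.log) // 2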
@@ -5,7 +5,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
 import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
-import { getBaseClasses, transformBracesWithColon, convertChatHistoryToText, convertBaseMessagetoIMessage } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
 import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
 import {
     FlowiseMemory,

@@ -23,10 +23,8 @@ import { Moderation, checkInputs, streamResponse } from '../../moderation/Modera
 import { formatResponse } from '../../outputparsers/OutputParserHelpers'
 import type { Document } from '@langchain/core/documents'
 import { BaseRetriever } from '@langchain/core/retrievers'
-import { RESPONSE_TEMPLATE, REPHRASE_TEMPLATE } from '../../chains/ConversationalRetrievalQAChain/prompts'
+import { RESPONSE_TEMPLATE } from '../../chains/ConversationalRetrievalQAChain/prompts'
 import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils'
-import { StringOutputParser } from '@langchain/core/output_parsers'
-import { Tool } from '@langchain/core/tools'

 class ConversationalRetrievalToolAgent_Agents implements INode {
     label: string

@@ -44,7 +42,7 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
    constructor(fields?: { sessionId?: string }) {
        this.label = 'Conversational Retrieval Tool Agent'
        this.name = 'conversationalRetrievalToolAgent'
-        this.author = 'niztal(falkor) and nikitas-novatix'
+        this.author = 'niztal(falkor)'
        this.version = 1.0
        this.type = 'AgentExecutor'
        this.category = 'Agents'

@@ -81,26 +79,6 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
                optional: true,
                default: RESPONSE_TEMPLATE
            },
-            {
-                label: 'Rephrase Prompt',
-                name: 'rephrasePrompt',
-                type: 'string',
-                description: 'Using previous chat history, rephrase question into a standalone question',
-                warning: 'Prompt must include input variables: {chat_history} and {question}',
-                rows: 4,
-                additionalParams: true,
-                optional: true,
-                default: REPHRASE_TEMPLATE
-            },
-            {
-                label: 'Rephrase Model',
-                name: 'rephraseModel',
-                type: 'BaseChatModel',
-                description:
-                    'Optional: Use a different (faster/cheaper) model for rephrasing. If not specified, uses the main Tool Calling Chat Model.',
-                optional: true,
-                additionalParams: true
-            },
            {
                label: 'Input Moderation',
                description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',

@@ -125,9 +103,8 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
        this.sessionId = fields?.sessionId
    }

-    // The agent will be prepared in run() with the correct user message - it needs the actual runtime input for rephrasing
-    async init(_nodeData: INodeData, _input: string, _options: ICommonObject): Promise<any> {
-        return null
+    async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
+        return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
    }

    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {

@@ -171,23 +148,6 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
                    sseStreamer.streamUsedToolsEvent(chatId, res.usedTools)
                    usedTools = res.usedTools
                }
-
-                // If the tool is set to returnDirect, stream the output to the client
-                if (res.usedTools && res.usedTools.length) {
-                    let inputTools = nodeData.inputs?.tools
-                    inputTools = flatten(inputTools)
-                    for (const tool of res.usedTools) {
-                        const inputTool = inputTools.find((inputTool: Tool) => inputTool.name === tool.tool)
-                        if (inputTool && (inputTool as any).returnDirect && shouldStreamResponse) {
-                            sseStreamer.streamTokenEvent(chatId, tool.toolOutput)
-                            // Prevent CustomChainHandler from streaming the same output again
-                            if (res.output === tool.toolOutput) {
-                                res.output = ''
-                            }
-                        }
-                    }
-                }
-                // The CustomChainHandler will send the stream end event
            } else {
                res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
                if (res.sourceDocuments) {

@@ -250,11 +210,9 @@ const prepareAgent = async (
    flowObj: { sessionId?: string; chatId?: string; input?: string }
 ) => {
    const model = nodeData.inputs?.model as BaseChatModel
-    const rephraseModel = (nodeData.inputs?.rephraseModel as BaseChatModel) || model // Use main model if not specified
    const maxIterations = nodeData.inputs?.maxIterations as string
    const memory = nodeData.inputs?.memory as FlowiseMemory
    let systemMessage = nodeData.inputs?.systemMessage as string
-    let rephrasePrompt = nodeData.inputs?.rephrasePrompt as string
    let tools = nodeData.inputs?.tools
    tools = flatten(tools)
    const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'

@@ -262,9 +220,6 @@ const prepareAgent = async (
    const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever

    systemMessage = transformBracesWithColon(systemMessage)
-    if (rephrasePrompt) {
-        rephrasePrompt = transformBracesWithColon(rephrasePrompt)
-    }

    const prompt = ChatPromptTemplate.fromMessages([
        ['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],

@@ -308,37 +263,6 @@ const prepareAgent = async (
    const modelWithTools = model.bindTools(tools)

-    // Function to get standalone question (either rephrased or original)
-    const getStandaloneQuestion = async (input: string): Promise<string> => {
-        // If no rephrase prompt, return the original input
-        if (!rephrasePrompt) {
-            return input
-        }
-
-        // Get chat history (use empty string if none)
-        const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[]
-        const iMessages = convertBaseMessagetoIMessage(messages)
-        const chatHistoryString = convertChatHistoryToText(iMessages)
-
-        // Always rephrase to normalize/expand user queries for better retrieval
-        try {
-            const CONDENSE_QUESTION_PROMPT = PromptTemplate.fromTemplate(rephrasePrompt)
-            const condenseQuestionChain = RunnableSequence.from([CONDENSE_QUESTION_PROMPT, rephraseModel, new StringOutputParser()])
-            const res = await condenseQuestionChain.invoke({
-                question: input,
-                chat_history: chatHistoryString
-            })
-            return res
-        } catch (error) {
-            console.error('Error rephrasing question:', error)
-            // On error, fall back to original input
-            return input
-        }
-    }
-
-    // Get standalone question before creating runnable
-    const standaloneQuestion = await getStandaloneQuestion(flowObj?.input || '')
-
    const runnableAgent = RunnableSequence.from([
        {
            [inputKey]: (i: { input: string; steps: ToolsAgentStep[] }) => i.input,

@@ -348,9 +272,7 @@ const prepareAgent = async (
            return messages ?? []
        },
        context: async (i: { input: string; chatHistory?: string }) => {
-            // Use the standalone question (rephrased or original) for retrieval
-            const retrievalQuery = standaloneQuestion || i.input
-            const relevantDocs = await vectorStoreRetriever.invoke(retrievalQuery)
+            const relevantDocs = await vectorStoreRetriever.invoke(i.input)
            const formattedDocs = formatDocs(relevantDocs)
            return formattedDocs
        }

@@ -373,6 +295,4 @@ const prepareAgent = async (
    return executor
 }

-module.exports = {
-    nodeClass: ConversationalRetrievalToolAgent_Agents
-}
+module.exports = { nodeClass: ConversationalRetrievalToolAgent_Agents }
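The removed rephrase step follows LangChain's standard condense-question pattern, built from the same primitives the deleted code uses (`PromptTemplate`, `RunnableSequence`, `StringOutputParser`). A minimal standalone sketch; the prompt and model values are placeholders:

    import { PromptTemplate } from '@langchain/core/prompts'
    import { RunnableSequence } from '@langchain/core/runnables'
    import { StringOutputParser } from '@langchain/core/output_parsers'
    import type { BaseChatModel } from '@langchain/core/language_models/chat_models'

    // Build a chain that rewrites (question, chat_history) into a standalone question
    const makeCondenseChain = (rephrasePrompt: string, model: BaseChatModel) =>
        RunnableSequence.from([PromptTemplate.fromTemplate(rephrasePrompt), model, new StringOutputParser()])

    // Usage (model is any chat model instance):
    // const standalone = await makeCondenseChain(REPHRASE_TEMPLATE, model).invoke({ question, chat_history })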
@@ -578,7 +578,7 @@ class OpenAIAssistant_Agents implements INode {
                            toolOutput
                        })
                    } catch (e) {
-                        await analyticHandlers.onToolError(toolIds, e)
+                        await analyticHandlers.onToolEnd(toolIds, e)
                        console.error('Error executing tool', e)
                        throw new Error(
                            `Error executing tool. Tool: ${tool.name}. Thread ID: ${threadId}. Run ID: ${runThreadId}`

@@ -703,7 +703,7 @@ class OpenAIAssistant_Agents implements INode {
                            toolOutput
                        })
                    } catch (e) {
-                        await analyticHandlers.onToolError(toolIds, e)
+                        await analyticHandlers.onToolEnd(toolIds, e)
                        console.error('Error executing tool', e)
                        clearInterval(timeout)
                        reject(

@@ -1096,7 +1096,7 @@ async function handleToolSubmission(params: ToolSubmissionParams): Promise<ToolS
                toolOutput
            })
        } catch (e) {
-            await analyticHandlers.onToolError(toolIds, e)
+            await analyticHandlers.onToolEnd(toolIds, e)
            console.error('Error executing tool', e)
            throw new Error(`Error executing tool. Tool: ${tool.name}. Thread ID: ${threadId}. Run ID: ${runThreadId}`)
        }
@@ -91,7 +91,7 @@ class ChatAnthropic_ChatModels implements INode {
                label: 'Extended Thinking',
                name: 'extendedThinking',
                type: 'boolean',
-                description: 'Enable extended thinking for reasoning model such as Claude Sonnet 3.7 and Claude 4',
+                description: 'Enable extended thinking for reasoning model such as Claude Sonnet 3.7',
                optional: true,
                additionalParams: true
            },
@@ -174,18 +174,6 @@ class GoogleGenerativeAI_ChatModels implements INode {
                optional: true,
                additionalParams: true
            },
-            {
-                label: 'Thinking Budget',
-                name: 'thinkingBudget',
-                type: 'number',
-                description: 'Guides the number of thinking tokens. -1 for dynamic, 0 to disable, or positive integer (Gemini 2.5 models).',
-                step: 1,
-                optional: true,
-                additionalParams: true,
-                show: {
-                    modelName: ['gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.5-flash-lite']
-                }
-            },
            {
                label: 'Base URL',
                name: 'baseUrl',

@@ -228,7 +216,6 @@ class GoogleGenerativeAI_ChatModels implements INode {
        const cache = nodeData.inputs?.cache as BaseCache
        const streaming = nodeData.inputs?.streaming as boolean
        const baseUrl = nodeData.inputs?.baseUrl as string | undefined
-        const thinkingBudget = nodeData.inputs?.thinkingBudget as string

        const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean

@@ -248,7 +235,6 @@ class GoogleGenerativeAI_ChatModels implements INode {
        if (cache) obj.cache = cache
        if (temperature) obj.temperature = parseFloat(temperature)
        if (baseUrl) obj.baseUrl = baseUrl
-        if (thinkingBudget) obj.thinkingBudget = parseInt(thinkingBudget, 10)

        let safetySettings: SafetySetting[] = []
        if (_safetySettings) {
@@ -174,9 +174,6 @@ export interface GoogleGenerativeAIChatInput extends BaseChatModelParams, Pick<G
     * - Gemini 1.0 Pro version gemini-1.0-pro-002
     */
    convertSystemMessageToHumanContent?: boolean | undefined
-
-    /** Thinking budget for Gemini 2.5 thinking models. Supports -1 (dynamic), 0 (off), or positive integers. */
-    thinkingBudget?: number
 }

 /**

@@ -602,17 +599,10 @@ export class LangchainChatGoogleGenerativeAI

    convertSystemMessageToHumanContent: boolean | undefined

-    thinkingBudget?: number
-
    private client: GenerativeModel

    get _isMultimodalModel() {
-        return (
-            this.model.includes('vision') ||
-            this.model.startsWith('gemini-1.5') ||
-            this.model.startsWith('gemini-2') ||
-            this.model.startsWith('gemini-3')
-        )
+        return this.model.includes('vision') || this.model.startsWith('gemini-1.5') || this.model.startsWith('gemini-2')
    }

    constructor(fields: GoogleGenerativeAIChatInput) {

@@ -667,7 +657,6 @@ export class LangchainChatGoogleGenerativeAI

        this.streaming = fields.streaming ?? this.streaming
        this.json = fields.json
-        this.thinkingBudget = fields.thinkingBudget

        this.client = new GenerativeAI(this.apiKey).getGenerativeModel(
            {

@@ -687,22 +676,12 @@ export class LangchainChatGoogleGenerativeAI
                baseUrl: fields.baseUrl
            }
        )
-        if (this.thinkingBudget !== undefined) {
-            ;(this.client.generationConfig as any).thinkingConfig = {
-                ...(this.thinkingBudget !== undefined ? { thinkingBudget: this.thinkingBudget } : {})
-            }
-        }
        this.streamUsage = fields.streamUsage ?? this.streamUsage
    }

    useCachedContent(cachedContent: CachedContent, modelParams?: ModelParams, requestOptions?: RequestOptions): void {
        if (!this.apiKey) return
        this.client = new GenerativeAI(this.apiKey).getGenerativeModelFromCachedContent(cachedContent, modelParams, requestOptions)
-        if (this.thinkingBudget !== undefined) {
-            ;(this.client.generationConfig as any).thinkingConfig = {
-                ...(this.thinkingBudget !== undefined ? { thinkingBudget: this.thinkingBudget } : {})
-            }
-        }
    }

    get useSystemInstruction(): boolean {
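The deleted thinkingBudget plumbing above writes a `thinkingConfig` entry onto the client's `generationConfig`; the `as any` cast appears because the SDK typings do not declare that field. A distilled sketch of the same pattern, with the client type simplified for illustration:

    // Simplified from the removed code; `client` stands in for the SDK's GenerativeModel
    const applyThinkingBudget = (client: { generationConfig: Record<string, any> }, thinkingBudget?: number): void => {
        if (thinkingBudget !== undefined) {
            client.generationConfig.thinkingConfig = { thinkingBudget } // -1 dynamic, 0 off, or a positive token count
        }
    }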
@ -48,8 +48,6 @@ export function getMessageAuthor(message: BaseMessage) {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* !!! IMPORTANT: Must return 'user' as default instead of throwing error
|
|
||||||
* https://github.com/FlowiseAI/Flowise/issues/4743
|
|
||||||
* Maps a message type to a Google Generative AI chat author.
|
* Maps a message type to a Google Generative AI chat author.
|
||||||
* @param message The message to map.
|
* @param message The message to map.
|
||||||
* @param model The model to use for mapping.
|
* @param model The model to use for mapping.
|
||||||
|
|
@ -452,7 +450,6 @@ export function mapGenerateContentResultToChatResult(
|
||||||
const [candidate] = response.candidates
|
const [candidate] = response.candidates
|
||||||
const { content: candidateContent, ...generationInfo } = candidate
|
const { content: candidateContent, ...generationInfo } = candidate
|
||||||
let content: MessageContent | undefined
|
let content: MessageContent | undefined
|
||||||
const inlineDataItems: any[] = []
|
|
||||||
|
|
||||||
if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length === 1 && candidateContent.parts[0].text) {
|
if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length === 1 && candidateContent.parts[0].text) {
|
||||||
content = candidateContent.parts[0].text
|
content = candidateContent.parts[0].text
|
||||||
|
|
@@ -473,18 +470,6 @@ export function mapGenerateContentResultToChatResult(
                     type: 'codeExecutionResult',
                     codeExecutionResult: p.codeExecutionResult
                 }
-            } else if ('inlineData' in p && p.inlineData) {
-                // Extract inline image data for processing by Agent
-                inlineDataItems.push({
-                    type: 'gemini_inline_data',
-                    mimeType: p.inlineData.mimeType,
-                    data: p.inlineData.data
-                })
-                // Return the inline data as part of the content structure
-                return {
-                    type: 'inlineData',
-                    inlineData: p.inlineData
-                }
             }
             return p
         })
@@ -501,12 +486,6 @@ export function mapGenerateContentResultToChatResult(
         text = block?.text ?? text
     }

-    // Build response_metadata with inline data if present
-    const response_metadata: any = {}
-    if (inlineDataItems.length > 0) {
-        response_metadata.inlineData = inlineDataItems
-    }
-
     const generation: ChatGeneration = {
         text,
         message: new AIMessage({
@@ -521,8 +500,7 @@ export function mapGenerateContentResultToChatResult(
             additional_kwargs: {
                 ...generationInfo
             },
-            usage_metadata: extra?.usageMetadata,
-            response_metadata: Object.keys(response_metadata).length > 0 ? response_metadata : undefined
+            usage_metadata: extra?.usageMetadata
         }),
         generationInfo
     }
@@ -553,8 +531,6 @@ export function convertResponseContentToChatGenerationChunk(
     const [candidate] = response.candidates
     const { content: candidateContent, ...generationInfo } = candidate
     let content: MessageContent | undefined
-    const inlineDataItems: any[] = []
-
     // Checks if some parts do not have text. If false, it means that the content is a string.
     if (Array.isArray(candidateContent?.parts) && candidateContent.parts.every((p) => 'text' in p)) {
         content = candidateContent.parts.map((p) => p.text).join('')
@@ -575,18 +551,6 @@ export function convertResponseContentToChatGenerationChunk(
                     type: 'codeExecutionResult',
                     codeExecutionResult: p.codeExecutionResult
                 }
-            } else if ('inlineData' in p && p.inlineData) {
-                // Extract inline image data for processing by Agent
-                inlineDataItems.push({
-                    type: 'gemini_inline_data',
-                    mimeType: p.inlineData.mimeType,
-                    data: p.inlineData.data
-                })
-                // Return the inline data as part of the content structure
-                return {
-                    type: 'inlineData',
-                    inlineData: p.inlineData
-                }
             }
             return p
         })
@@ -616,12 +580,6 @@ export function convertResponseContentToChatGenerationChunk(
         )
     }

-    // Build response_metadata with inline data if present
-    const response_metadata: any = {}
-    if (inlineDataItems.length > 0) {
-        response_metadata.inlineData = inlineDataItems
-    }
-
     return new ChatGenerationChunk({
         text,
         message: new AIMessageChunk({
@@ -631,8 +589,7 @@ export function convertResponseContentToChatGenerationChunk(
             // Each chunk can have unique "generationInfo", and merging strategy is unclear,
             // so leave blank for now.
             additional_kwargs: {},
-            usage_metadata: extra.usageMetadata,
-            response_metadata: Object.keys(response_metadata).length > 0 ? response_metadata : undefined
+            usage_metadata: extra.usageMetadata
         }),
         generationInfo
     })
@@ -41,17 +41,15 @@ class ChatHuggingFace_ChatModels implements INode {
                 label: 'Model',
                 name: 'model',
                 type: 'string',
-                description:
-                    'Model name (e.g., deepseek-ai/DeepSeek-V3.2-Exp:novita). If model includes provider (:) or using router endpoint, leave Endpoint blank.',
-                placeholder: 'deepseek-ai/DeepSeek-V3.2-Exp:novita'
+                description: 'If using own inference endpoint, leave this blank',
+                placeholder: 'gpt2'
             },
             {
                 label: 'Endpoint',
                 name: 'endpoint',
                 type: 'string',
                 placeholder: 'https://xyz.eu-west-1.aws.endpoints.huggingface.cloud/gpt2',
-                description:
-                    'Custom inference endpoint (optional). Not needed for models with providers (:) or router endpoints. Leave blank to use Inference Providers.',
+                description: 'Using your own inference endpoint',
                 optional: true
             },
             {
@@ -105,7 +103,7 @@ class ChatHuggingFace_ChatModels implements INode {
             type: 'string',
             rows: 4,
             placeholder: 'AI assistant:',
-            description: 'Sets the stop sequences to use. Use comma to separate different sequences.',
+            description: 'Sets the stop sequences to use. Use comma to seperate different sequences.',
             optional: true,
             additionalParams: true
         }
@@ -126,15 +124,6 @@ class ChatHuggingFace_ChatModels implements INode {
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const huggingFaceApiKey = getCredentialParam('huggingFaceApiKey', credentialData, nodeData)

-        if (!huggingFaceApiKey) {
-            console.error('[ChatHuggingFace] API key validation failed: No API key found')
-            throw new Error('HuggingFace API key is required. Please configure it in the credential settings.')
-        }
-
-        if (!huggingFaceApiKey.startsWith('hf_')) {
-            console.warn('[ChatHuggingFace] API key format warning: Key does not start with "hf_"')
-        }
-
         const obj: Partial<HFInput> = {
             model,
             apiKey: huggingFaceApiKey
@@ -56,9 +56,9 @@ export class HuggingFaceInference extends LLM implements HFInput {
         this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY')
         this.endpointUrl = fields?.endpointUrl
         this.includeCredentials = fields?.includeCredentials
-        if (!this.apiKey || this.apiKey.trim() === '') {
+        if (!this.apiKey) {
             throw new Error(
-                'Please set an API key for HuggingFace Hub. Either configure it in the credential settings in the UI, or set the environment variable HUGGINGFACEHUB_API_KEY.'
+                'Please set an API key for HuggingFace Hub in the environment variable HUGGINGFACEHUB_API_KEY or in the apiKey field of the HuggingFaceInference constructor.'
             )
         }
     }
@@ -68,21 +68,19 @@ export class HuggingFaceInference extends LLM implements HFInput {
     }

     invocationParams(options?: this['ParsedCallOptions']) {
-        // Return parameters compatible with chatCompletion API (OpenAI-compatible format)
-        const params: any = {
-            temperature: this.temperature,
-            max_tokens: this.maxTokens,
-            stop: options?.stop ?? this.stopSequences,
-            top_p: this.topP
+        return {
+            model: this.model,
+            parameters: {
+                // make it behave similar to openai, returning only the generated text
+                return_full_text: false,
+                temperature: this.temperature,
+                max_new_tokens: this.maxTokens,
+                stop: options?.stop ?? this.stopSequences,
+                top_p: this.topP,
+                top_k: this.topK,
+                repetition_penalty: this.frequencyPenalty
+            }
         }
-        // Include optional parameters if they are defined
-        if (this.topK !== undefined) {
-            params.top_k = this.topK
-        }
-        if (this.frequencyPenalty !== undefined) {
-            params.frequency_penalty = this.frequencyPenalty
-        }
-        return params
     }

     async *_streamResponseChunks(
@@ -90,109 +88,51 @@ export class HuggingFaceInference extends LLM implements HFInput {
         options: this['ParsedCallOptions'],
         runManager?: CallbackManagerForLLMRun
     ): AsyncGenerator<GenerationChunk> {
-        try {
-            const client = await this._prepareHFInference()
-            const stream = await this.caller.call(async () =>
-                client.chatCompletionStream({
-                    model: this.model,
-                    messages: [{ role: 'user', content: prompt }],
-                    ...this.invocationParams(options)
+        const hfi = await this._prepareHFInference()
+        const stream = await this.caller.call(async () =>
+            hfi.textGenerationStream({
+                ...this.invocationParams(options),
+                inputs: prompt
+            })
+        )
+        for await (const chunk of stream) {
+            const token = chunk.token.text
+            yield new GenerationChunk({ text: token, generationInfo: chunk })
+            await runManager?.handleLLMNewToken(token ?? '')
+
+            // stream is done
+            if (chunk.generated_text)
+                yield new GenerationChunk({
+                    text: '',
+                    generationInfo: { finished: true }
                 })
-            )
-            for await (const chunk of stream) {
-                const token = chunk.choices[0]?.delta?.content || ''
-                if (token) {
-                    yield new GenerationChunk({ text: token, generationInfo: chunk })
-                    await runManager?.handleLLMNewToken(token)
-                }
-                // stream is done when finish_reason is set
-                if (chunk.choices[0]?.finish_reason) {
-                    yield new GenerationChunk({
-                        text: '',
-                        generationInfo: { finished: true }
-                    })
-                    break
-                }
-            }
-        } catch (error: any) {
-            console.error('[ChatHuggingFace] Error in _streamResponseChunks:', error)
-            // Provide more helpful error messages
-            if (error?.message?.includes('endpointUrl') || error?.message?.includes('third-party provider')) {
-                throw new Error(
-                    `Cannot use custom endpoint with model "${this.model}" that includes a provider. Please leave the Endpoint field blank in the UI. Original error: ${error.message}`
-                )
-            }
-            throw error
         }
     }

     /** @ignore */
     async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
-        try {
-            const client = await this._prepareHFInference()
-            // Use chatCompletion for chat models (v4 supports conversational models via Inference Providers)
-            const args = {
-                model: this.model,
-                messages: [{ role: 'user', content: prompt }],
-                ...this.invocationParams(options)
-            }
-            const res = await this.caller.callWithOptions({ signal: options.signal }, client.chatCompletion.bind(client), args)
-            const content = res.choices[0]?.message?.content || ''
-            if (!content) {
-                console.error('[ChatHuggingFace] No content in response:', JSON.stringify(res))
-                throw new Error(`No content received from HuggingFace API. Response: ${JSON.stringify(res)}`)
-            }
-            return content
-        } catch (error: any) {
-            console.error('[ChatHuggingFace] Error in _call:', error.message)
-            // Provide more helpful error messages
-            if (error?.message?.includes('endpointUrl') || error?.message?.includes('third-party provider')) {
-                throw new Error(
-                    `Cannot use custom endpoint with model "${this.model}" that includes a provider. Please leave the Endpoint field blank in the UI. Original error: ${error.message}`
-                )
-            }
-            if (error?.message?.includes('Invalid username or password') || error?.message?.includes('authentication')) {
-                throw new Error(
-                    `HuggingFace API authentication failed. Please verify your API key is correct and starts with "hf_". Original error: ${error.message}`
-                )
-            }
-            throw error
-        }
+        const hfi = await this._prepareHFInference()
+        const args = { ...this.invocationParams(options), inputs: prompt }
+        const res = await this.caller.callWithOptions({ signal: options.signal }, hfi.textGeneration.bind(hfi), args)
+        return res.generated_text
     }

     /** @ignore */
     private async _prepareHFInference() {
-        if (!this.apiKey || this.apiKey.trim() === '') {
-            console.error('[ChatHuggingFace] API key validation failed: Empty or undefined')
-            throw new Error('HuggingFace API key is required. Please configure it in the credential settings.')
-        }
-
-        const { InferenceClient } = await HuggingFaceInference.imports()
-        // Use InferenceClient for chat models (works better with Inference Providers)
-        const client = new InferenceClient(this.apiKey)
-
-        // Don't override endpoint if model uses a provider (contains ':') or if endpoint is router-based
-        // When using Inference Providers, endpoint should be left blank - InferenceClient handles routing automatically
-        if (
-            this.endpointUrl &&
-            !this.model.includes(':') &&
-            !this.endpointUrl.includes('/v1/chat/completions') &&
-            !this.endpointUrl.includes('router.huggingface.co')
-        ) {
-            return client.endpoint(this.endpointUrl)
-        }
-
-        // Return client without endpoint override - InferenceClient will use Inference Providers automatically
-        return client
+        const { HfInference } = await HuggingFaceInference.imports()
+        const hfi = new HfInference(this.apiKey, {
+            includeCredentials: this.includeCredentials
+        })
+        return this.endpointUrl ? hfi.endpoint(this.endpointUrl) : hfi
     }

     /** @ignore */
     static async imports(): Promise<{
-        InferenceClient: typeof import('@huggingface/inference').InferenceClient
+        HfInference: typeof import('@huggingface/inference').HfInference
     }> {
         try {
-            const { InferenceClient } = await import('@huggingface/inference')
-            return { InferenceClient }
+            const { HfInference } = await import('@huggingface/inference')
+            return { HfInference }
         } catch (e) {
             throw new Error('Please install huggingface as a dependency with, e.g. `pnpm install @huggingface/inference`')
         }
@@ -1,8 +1,7 @@
-import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
 import { BaseCache } from '@langchain/core/caches'
-import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { ChatOpenRouter } from './FlowiseChatOpenRouter'

 class ChatOpenRouter_ChatModels implements INode {
     label: string
@@ -24,7 +23,7 @@ class ChatOpenRouter_ChatModels implements INode {
         this.icon = 'openRouter.svg'
         this.category = 'Chat Models'
         this.description = 'Wrapper around Open Router Inference API'
-        this.baseClasses = [this.type, ...getBaseClasses(LangchainChatOpenAI)]
+        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
         this.credential = {
             label: 'Connect Credential',
             name: 'credential',
@@ -115,40 +114,6 @@ class ChatOpenRouter_ChatModels implements INode {
                 type: 'json',
                 optional: true,
                 additionalParams: true
-            },
-            {
-                label: 'Allow Image Uploads',
-                name: 'allowImageUploads',
-                type: 'boolean',
-                description:
-                    'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
-                default: false,
-                optional: true
-            },
-            {
-                label: 'Image Resolution',
-                description: 'This parameter controls the resolution in which the model views the image.',
-                name: 'imageResolution',
-                type: 'options',
-                options: [
-                    {
-                        label: 'Low',
-                        name: 'low'
-                    },
-                    {
-                        label: 'High',
-                        name: 'high'
-                    },
-                    {
-                        label: 'Auto',
-                        name: 'auto'
-                    }
-                ],
-                default: 'low',
-                optional: false,
-                show: {
-                    allowImageUploads: true
-                }
             }
         ]
     }
@@ -165,8 +130,6 @@ class ChatOpenRouter_ChatModels implements INode {
         const basePath = (nodeData.inputs?.basepath as string) || 'https://openrouter.ai/api/v1'
         const baseOptions = nodeData.inputs?.baseOptions
         const cache = nodeData.inputs?.cache as BaseCache
-        const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
-        const imageResolution = nodeData.inputs?.imageResolution as string

         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const openRouterApiKey = getCredentialParam('openRouterApiKey', credentialData, nodeData)
@@ -192,7 +155,7 @@ class ChatOpenRouter_ChatModels implements INode {
             try {
                 parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
             } catch (exception) {
-                throw new Error("Invalid JSON in the ChatOpenRouter's BaseOptions: " + exception)
+                throw new Error("Invalid JSON in the ChatCerebras's BaseOptions: " + exception)
             }
         }
@@ -203,15 +166,7 @@ class ChatOpenRouter_ChatModels implements INode {
             }
         }

-        const multiModalOption: IMultiModalOption = {
-            image: {
-                allowImageUploads: allowImageUploads ?? false,
-                imageResolution
-            }
-        }
-
-        const model = new ChatOpenRouter(nodeData.id, obj)
-        model.setMultiModalOption(multiModalOption)
+        const model = new ChatOpenAI(obj)
         return model
     }
 }
@@ -1,29 +0,0 @@
-import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
-import { IMultiModalOption, IVisionChatModal } from '../../../src'
-
-export class ChatOpenRouter extends LangchainChatOpenAI implements IVisionChatModal {
-    configuredModel: string
-    configuredMaxToken?: number
-    multiModalOption: IMultiModalOption
-    id: string
-
-    constructor(id: string, fields?: ChatOpenAIFields) {
-        super(fields)
-        this.id = id
-        this.configuredModel = fields?.modelName ?? ''
-        this.configuredMaxToken = fields?.maxTokens
-    }
-
-    revertToOriginalModel(): void {
-        this.model = this.configuredModel
-        this.maxTokens = this.configuredMaxToken
-    }
-
-    setMultiModalOption(multiModalOption: IMultiModalOption): void {
-        this.multiModalOption = multiModalOption
-    }
-
-    setVisionModel(): void {
-        // pass - OpenRouter models don't need model switching
-    }
-}
@@ -95,7 +95,7 @@ class API_DocumentLoaders implements INode {
             type: 'string',
             rows: 4,
             description:
-                'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, separated by comma. Use * to omit all metadata keys except the ones you specify in the Additional Metadata field',
+                'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, seperated by comma. Use * to omit all metadata keys execept the ones you specify in the Additional Metadata field',
             placeholder: 'key1, key2, key3.nestedKey1',
             optional: true,
             additionalParams: true
@@ -2,7 +2,7 @@ import { TextLoader } from 'langchain/document_loaders/fs/text'
 import Papa from 'papaparse'

 type CSVLoaderOptions = {
-    // Return specific column from key (string) or index (integer)
+    // Return specifific column from key (string) or index (integer)
     column?: string | number
     // Force separator (default: auto detect)
     separator?: string
@@ -47,7 +47,7 @@ class Json_DocumentLoaders implements INode {
     constructor() {
         this.label = 'Json File'
         this.name = 'jsonFile'
-        this.version = 3.1
+        this.version = 3.0
         this.type = 'Document'
         this.icon = 'json.svg'
         this.category = 'Document Loaders'
@@ -66,14 +66,6 @@ class Json_DocumentLoaders implements INode {
                 type: 'TextSplitter',
                 optional: true
             },
-            {
-                label: 'Separate by JSON Object (JSON Array)',
-                name: 'separateByObject',
-                type: 'boolean',
-                description: 'If enabled and the file is a JSON Array, each JSON object will be extracted as a chunk',
-                optional: true,
-                additionalParams: true
-            },
             {
                 label: 'Pointers Extraction (separated by commas)',
                 name: 'pointersName',
@@ -81,10 +73,7 @@ class Json_DocumentLoaders implements INode {
             description:
                 'Ex: { "key": "value" }, Pointer Extraction = "key", "value" will be extracted as pageContent of the chunk. Use comma to separate multiple pointers',
             placeholder: 'key1, key2',
-            optional: true,
-            hide: {
-                separateByObject: true
-            }
+            optional: true
         },
         {
             label: 'Additional Metadata',
@@ -133,7 +122,6 @@ class Json_DocumentLoaders implements INode {
         const pointersName = nodeData.inputs?.pointersName as string
         const metadata = nodeData.inputs?.metadata
         const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string
-        const separateByObject = nodeData.inputs?.separateByObject as boolean
         const output = nodeData.outputs?.output as string

         let omitMetadataKeys: string[] = []
@@ -165,7 +153,7 @@ class Json_DocumentLoaders implements INode {
             if (!file) continue
             const fileData = await getFileFromStorage(file, orgId, chatflowid)
             const blob = new Blob([fileData])
-            const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata, separateByObject)
+            const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata)

             if (textSplitter) {
                 let splittedDocs = await loader.load()
@@ -188,7 +176,7 @@ class Json_DocumentLoaders implements INode {
             splitDataURI.pop()
             const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
             const blob = new Blob([bf])
-            const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata, separateByObject)
+            const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata)

             if (textSplitter) {
                 let splittedDocs = await loader.load()
@@ -318,20 +306,13 @@ class TextLoader extends BaseDocumentLoader {
 class JSONLoader extends TextLoader {
     public pointers: string[]
     private metadataMapping: Record<string, string>
-    private separateByObject: boolean

-    constructor(
-        filePathOrBlob: string | Blob,
-        pointers: string | string[] = [],
-        metadataMapping: Record<string, string> = {},
-        separateByObject: boolean = false
-    ) {
+    constructor(filePathOrBlob: string | Blob, pointers: string | string[] = [], metadataMapping: Record<string, string> = {}) {
         super(filePathOrBlob)
         this.pointers = Array.isArray(pointers) ? pointers : [pointers]
         if (metadataMapping) {
             this.metadataMapping = typeof metadataMapping === 'object' ? metadataMapping : JSON.parse(metadataMapping)
         }
-        this.separateByObject = separateByObject
     }

     protected async parse(raw: string): Promise<Document[]> {
@@ -342,24 +323,14 @@ class JSONLoader extends TextLoader {
         const jsonArray = Array.isArray(json) ? json : [json]

         for (const item of jsonArray) {
-            if (this.separateByObject) {
-                if (typeof item === 'object' && item !== null && !Array.isArray(item)) {
-                    const metadata = this.extractMetadata(item)
-                    const pageContent = this.formatObjectAsKeyValue(item)
-                    documents.push({
-                        pageContent,
-                        metadata
-                    })
-                }
-            } else {
-                const content = this.extractContent(item)
-                const metadata = this.extractMetadata(item)
-                for (const pageContent of content) {
-                    documents.push({
-                        pageContent,
-                        metadata
-                    })
-                }
+            const content = this.extractContent(item)
+            const metadata = this.extractMetadata(item)
+
+            for (const pageContent of content) {
+                documents.push({
+                    pageContent,
+                    metadata
+                })
             }
         }
@@ -399,30 +370,6 @@ class JSONLoader extends TextLoader {
         return metadata
     }

-    /**
-     * Formats a JSON object as readable key-value pairs
-     */
-    private formatObjectAsKeyValue(obj: any, prefix: string = ''): string {
-        const lines: string[] = []
-
-        for (const [key, value] of Object.entries(obj)) {
-            const fullKey = prefix ? `${prefix}.${key}` : key
-
-            if (value === null || value === undefined) {
-                lines.push(`${fullKey}: ${value}`)
-            } else if (Array.isArray(value)) {
-                lines.push(`${fullKey}: ${JSON.stringify(value)}`)
-            } else if (typeof value === 'object') {
-                // Recursively format nested objects
-                lines.push(this.formatObjectAsKeyValue(value, fullKey))
-            } else {
-                lines.push(`${fullKey}: ${value}`)
-            }
-        }
-
-        return lines.join('\n')
-    }
-
     /**
      * If JSON pointers are specified, return all strings below any of them
      * and exclude all other nodes expect if they match a JSON pointer.
@@ -100,7 +100,7 @@ export class OxylabsLoader extends BaseDocumentLoader {
         const params = {
             source: this.params.source,
             geo_location: this.params.geo_location,
-            render: this.params.render ? 'html' : null,
+            render: this.params.render,
             parse: this.params.parse,
             user_agent_type: this.params.user_agent_type,
             markdown: !this.params.parse,
@@ -110,14 +110,11 @@ export class OxylabsLoader extends BaseDocumentLoader {

         const response = await this.sendAPIRequest<OxylabsResponse>(params)

-        const docs: OxylabsDocument[] = response.data.results.map((result, index) => {
-            const content = typeof result.content === 'string' ? result.content : JSON.stringify(result.content)
-            return {
-                id: `${response.data.job.id.toString()}-${index}`,
-                pageContent: content,
-                metadata: {}
-            }
-        })
+        const docs: OxylabsDocument[] = response.data.results.map((result, index) => ({
+            id: `${response.data.job.id.toString()}-${index}`,
+            pageContent: result.content,
+            metadata: {}
+        }))

         return docs
     }
@@ -190,14 +190,11 @@ class Playwright_DocumentLoaders implements INode {
         async function playwrightLoader(url: string): Promise<Document[] | undefined> {
             try {
                 let docs = []
-
-                const executablePath = process.env.PLAYWRIGHT_EXECUTABLE_PATH
-
                 const config: PlaywrightWebBaseLoaderOptions = {
                     launchOptions: {
                         args: ['--no-sandbox'],
                         headless: true,
-                        executablePath: executablePath
+                        executablePath: process.env.PLAYWRIGHT_EXECUTABLE_FILE_PATH
                     }
                 }
                 if (waitUntilGoToOption) {
@@ -181,14 +181,11 @@ class Puppeteer_DocumentLoaders implements INode {
         async function puppeteerLoader(url: string): Promise<Document[] | undefined> {
             try {
                 let docs: Document[] = []
-
-                const executablePath = process.env.PUPPETEER_EXECUTABLE_PATH
-
                 const config: PuppeteerWebBaseLoaderOptions = {
                     launchOptions: {
                         args: ['--no-sandbox'],
                         headless: 'new',
-                        executablePath: executablePath
+                        executablePath: process.env.PUPPETEER_EXECUTABLE_FILE_PATH
                     }
                 }
                 if (waitUntilGoToOption) {
@@ -27,6 +27,8 @@ type Element = {
 }

 export class UnstructuredLoader extends BaseDocumentLoader {
+    public filePath: string
+
     private apiUrl = process.env.UNSTRUCTURED_API_URL || 'https://api.unstructuredapp.io/general/v0/general'

     private apiKey: string | undefined = process.env.UNSTRUCTURED_API_KEY
@@ -136,7 +138,7 @@ export class UnstructuredLoader extends BaseDocumentLoader {
         })

         if (!response.ok) {
-            throw new Error(`Failed to partition file with error ${response.status} and message ${await response.text()}`)
+            throw new Error(`Failed to partition file ${this.filePath} with error ${response.status} and message ${await response.text()}`)
         }

         const elements = await response.json()
@@ -4,11 +4,15 @@ import {
     UnstructuredLoaderOptions,
     UnstructuredLoaderStrategy,
     SkipInferTableTypes,
-    HiResModelName
+    HiResModelName,
+    UnstructuredLoader as LCUnstructuredLoader
 } from '@langchain/community/document_loaders/fs/unstructured'
 import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src/utils'
 import { getFileFromStorage, INodeOutputsValue } from '../../../src'
 import { UnstructuredLoader } from './Unstructured'
+import { isPathTraversal } from '../../../src/validator'
+import sanitize from 'sanitize-filename'
+import path from 'path'

 class UnstructuredFile_DocumentLoaders implements INode {
     label: string
@@ -40,6 +44,17 @@ class UnstructuredFile_DocumentLoaders implements INode {
             optional: true
         }
         this.inputs = [
+            /** Deprecated
+            {
+                label: 'File Path',
+                name: 'filePath',
+                type: 'string',
+                placeholder: '',
+                optional: true,
+                warning:
+                    'Use the File Upload instead of File path. If file is uploaded, this path is ignored. Path will be deprecated in future releases.'
+            },
+            */
             {
                 label: 'Files Upload',
                 name: 'fileObject',
@@ -440,6 +455,7 @@ class UnstructuredFile_DocumentLoaders implements INode {
     }

     async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const filePath = nodeData.inputs?.filePath as string
         const unstructuredAPIUrl = nodeData.inputs?.unstructuredAPIUrl as string
         const strategy = nodeData.inputs?.strategy as UnstructuredLoaderStrategy
         const encoding = nodeData.inputs?.encoding as string
@@ -544,8 +560,37 @@ class UnstructuredFile_DocumentLoaders implements INode {
                     docs.push(...loaderDocs)
                 }
             }
+        } else if (filePath) {
+            if (!filePath || typeof filePath !== 'string') {
+                throw new Error('Invalid file path format')
+            }
+
+            if (isPathTraversal(filePath)) {
+                throw new Error('Invalid path characters detected in filePath - path traversal not allowed')
+            }
+
+            const parsedPath = path.parse(filePath)
+            const sanitizedFilename = sanitize(parsedPath.base)
+
+            if (!sanitizedFilename || sanitizedFilename.trim() === '') {
+                throw new Error('Invalid filename after sanitization')
+            }
+
+            const sanitizedFilePath = path.join(parsedPath.dir, sanitizedFilename)
+
+            if (!path.isAbsolute(sanitizedFilePath)) {
+                throw new Error('File path must be absolute')
+            }
+
+            if (sanitizedFilePath.includes('..')) {
+                throw new Error('Invalid file path - directory traversal not allowed')
+            }
+
+            const loader = new LCUnstructuredLoader(sanitizedFilePath, obj)
+            const loaderDocs = await loader.load()
+            docs.push(...loaderDocs)
         } else {
-            throw new Error('File upload is required')
+            throw new Error('File path or File upload is required')
         }

         if (metadata) {
@@ -1,6 +1,3 @@
-/*
- * Uncomment this if you want to use the UnstructuredFolder to load a folder from the file system
-
 import { omit } from 'lodash'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
 import {
@@ -519,4 +516,3 @@ class UnstructuredFolder_DocumentLoaders implements INode {
 }

 module.exports = { nodeClass: UnstructuredFolder_DocumentLoaders }
-*/
@@ -96,7 +96,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
             {
                 label: 'Max AWS API retries',
                 name: 'maxRetries',
-                description: 'This will limit the number of AWS API for Titan model embeddings call retries. Used to avoid throttling.',
+                description: 'This will limit the nubmer of AWS API for Titan model embeddings call retries. Used to avoid throttling.',
                 type: 'number',
                 optional: true,
                 default: 5,
@@ -23,22 +23,24 @@ export class HuggingFaceInferenceEmbeddings extends Embeddings implements Huggin
         this.model = fields?.model ?? 'sentence-transformers/distilbert-base-nli-mean-tokens'
         this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY')
         this.endpoint = fields?.endpoint ?? ''
-        const hf = new HfInference(this.apiKey)
-        // v4 uses Inference Providers by default; only override if custom endpoint provided
-        this.client = this.endpoint ? hf.endpoint(this.endpoint) : hf
+        this.client = new HfInference(this.apiKey)
+        if (this.endpoint) this.client.endpoint(this.endpoint)
     }

     async _embed(texts: string[]): Promise<number[][]> {
         // replace newlines, which can negatively affect performance.
         const clean = texts.map((text) => text.replace(/\n/g, ' '))
+        const hf = new HfInference(this.apiKey)
         const obj: any = {
             inputs: clean
         }
-        if (!this.endpoint) {
+        if (this.endpoint) {
+            hf.endpoint(this.endpoint)
+        } else {
             obj.model = this.model
         }

-        const res = await this.caller.callWithOptions({}, this.client.featureExtraction.bind(this.client), obj)
+        const res = await this.caller.callWithOptions({}, hf.featureExtraction.bind(hf), obj)
         return res as number[][]
     }
@@ -39,7 +39,7 @@ class SubQuestionQueryEngine_LlamaIndex implements INode {
         this.icon = 'subQueryEngine.svg'
         this.category = 'Engine'
         this.description =
-            'Breaks complex query into sub questions for each relevant data source, then gather all the intermediate responses and synthesizes a final response'
+            'Breaks complex query into sub questions for each relevant data source, then gather all the intermediate reponses and synthesizes a final response'
         this.baseClasses = [this.type, 'BaseQueryEngine']
         this.tags = ['LlamaIndex']
         this.inputs = [
@@ -78,8 +78,6 @@ export class HuggingFaceInference extends LLM implements HFInput {
     async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
         const { HfInference } = await HuggingFaceInference.imports()
         const hf = new HfInference(this.apiKey)
-        // v4 uses Inference Providers by default; only override if custom endpoint provided
-        const hfClient = this.endpoint ? hf.endpoint(this.endpoint) : hf
         const obj: any = {
             parameters: {
                 // make it behave similar to openai, returning only the generated text
@@ -92,10 +90,12 @@ export class HuggingFaceInference extends LLM implements HFInput {
             },
             inputs: prompt
         }
-        if (!this.endpoint) {
+        if (this.endpoint) {
+            hf.endpoint(this.endpoint)
+        } else {
             obj.model = this.model
         }
-        const res = await this.caller.callWithOptions({ signal: options.signal }, hfClient.textGeneration.bind(hfClient), obj)
+        const res = await this.caller.callWithOptions({ signal: options.signal }, hf.textGeneration.bind(hf), obj)
         return res.generated_text
     }
@@ -21,7 +21,6 @@ import { ChatOpenAI } from '../../chatmodels/ChatOpenAI/FlowiseChatOpenAI'
 import { ChatAnthropic } from '../../chatmodels/ChatAnthropic/FlowiseChatAnthropic'
 import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils'
 import { ChatGoogleGenerativeAI } from '../../chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI'
-import { AzureChatOpenAI } from '../../chatmodels/AzureChatOpenAI/FlowiseAzureChatOpenAI'

 const sysPrompt = `You are a supervisor tasked with managing a conversation between the following workers: {team_members}.
 Given the following user request, respond with the worker to act next.
@@ -243,7 +242,7 @@ class Supervisor_MultiAgents implements INode {
                     }
                 }
             })
-        } else if (llm instanceof ChatOpenAI || llm instanceof AzureChatOpenAI) {
+        } else if (llm instanceof ChatOpenAI) {
             let prompt = ChatPromptTemplate.fromMessages([
                 ['system', systemPrompt],
                 new MessagesPlaceholder('messages'),
@@ -11,7 +11,7 @@ return [
     tool_calls: [
         {
             id: "12345",
-            name: "calculator",
+            name: "calulator",
             args: {
                 number1: 333382,
                 number2: 1932,
@@ -62,6 +62,7 @@ class MySQLRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
+                description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },
@@ -204,8 +205,8 @@ class MySQLRecordManager implements RecordManagerInterface {
     }

     async createSchema(): Promise<void> {
-        const dataSource = await this.getDataSource()
         try {
+            const dataSource = await this.getDataSource()
             const queryRunner = dataSource.createQueryRunner()
             const tableName = this.sanitizeTableName(this.tableName)

@@ -218,16 +219,7 @@ class MySQLRecordManager implements RecordManagerInterface {
                 unique key \`unique_key_namespace\` (\`key\`,
                 \`namespace\`));`)

-            // Add doc_id column if it doesn't exist (migration for existing tables)
-            const checkColumn = await queryRunner.manager.query(
-                `SELECT COUNT(1) ColumnExists FROM INFORMATION_SCHEMA.COLUMNS
-                WHERE table_schema=DATABASE() AND table_name='${tableName}' AND column_name='doc_id';`
-            )
-            if (checkColumn[0].ColumnExists === 0) {
-                await queryRunner.manager.query(`ALTER TABLE \`${tableName}\` ADD COLUMN \`doc_id\` longtext;`)
-            }
-
-            const columns = [\`updated_at\`, \`key\`, \`namespace\`, \`group_id\`, \`doc_id\`]
+            const columns = [\`updated_at\`, \`key\`, \`namespace\`, \`group_id\`]
             for (const column of columns) {
                 // MySQL does not support 'IF NOT EXISTS' function for Index
                 const Check = await queryRunner.manager.query(
@@ -249,8 +241,6 @@ class MySQLRecordManager implements RecordManagerInterface {
                 return
             }
             throw e
-        } finally {
-            await dataSource.destroy()
         }
     }

@@ -269,7 +259,7 @@ class MySQLRecordManager implements RecordManagerInterface {
         }
     }

-    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }
@@ -285,23 +275,23 @@ class MySQLRecordManager implements RecordManagerInterface {
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }

-        // Handle both new format (objects with uid and docId) and old format (strings)
-        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
-        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
-        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)
-
-        const groupIds = _groupIds ?? keyStrings.map(() => null)
-
-        if (groupIds.length !== keyStrings.length) {
-            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids (${groupIds.length})`)
-        }
+        const groupIds = _groupIds ?? keys.map(() => null)
+
+        if (groupIds.length !== keys.length) {
+            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`)
+        }

-        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i] ?? null, docIds[i] ?? null])
+        const recordsToUpsert = keys.map((key, i) => [
+            key,
+            this.namespace,
+            updatedAt,
+            groupIds[i] ?? null // Ensure groupIds[i] is null if undefined
+        ])

         const query = `
-            INSERT INTO \`${tableName}\` (\`key\`, \`namespace\`, \`updated_at\`, \`group_id\`, \`doc_id\`)
-            VALUES (?, ?, ?, ?, ?)
-            ON DUPLICATE KEY UPDATE \`updated_at\` = VALUES(\`updated_at\`), \`doc_id\` = VALUES(\`doc_id\`)`
+            INSERT INTO \`${tableName}\` (\`key\`, \`namespace\`, \`updated_at\`, \`group_id\`)
+            VALUES (?, ?, ?, ?)
+            ON DUPLICATE KEY UPDATE \`updated_at\` = VALUES(\`updated_at\`)`

         // To handle multiple files upsert
         try {
@@ -357,13 +347,13 @@ class MySQLRecordManager implements RecordManagerInterface {
         }
     }

-    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
+    async listKeys(options?: ListKeyOptions): Promise<string[]> {
         const dataSource = await this.getDataSource()
         const queryRunner = dataSource.createQueryRunner()
         const tableName = this.sanitizeTableName(this.tableName)

         try {
-            const { before, after, limit, groupIds, docId } = options ?? {}
+            const { before, after, limit, groupIds } = options ?? {}
             let query = `SELECT \`key\` FROM \`${tableName}\` WHERE \`namespace\` = ?`
             const values: (string | number | string[])[] = [this.namespace]
@@ -390,11 +380,6 @@ class MySQLRecordManager implements RecordManagerInterface {
                 values.push(...groupIds.filter((gid): gid is string => gid !== null))
             }

-            if (docId) {
-                query += ` AND \`doc_id\` = ?`
-                values.push(docId)
-            }
-
            query += ';'

            // Directly using try/catch with async/await for cleaner flow
@@ -78,6 +78,7 @@ class PostgresRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
+                description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },
@@ -221,8 +222,8 @@ class PostgresRecordManager implements RecordManagerInterface {
     }

     async createSchema(): Promise<void> {
-        const dataSource = await this.getDataSource()
         try {
+            const dataSource = await this.getDataSource()
             const queryRunner = dataSource.createQueryRunner()
             const tableName = this.sanitizeTableName(this.tableName)

@@ -240,19 +241,6 @@ class PostgresRecordManager implements RecordManagerInterface {
             CREATE INDEX IF NOT EXISTS namespace_index ON "${tableName}" (namespace);
             CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)

-            // Add doc_id column if it doesn't exist (migration for existing tables)
-            await queryRunner.manager.query(`
-                DO $$
-                BEGIN
-                    IF NOT EXISTS (
-                        SELECT 1 FROM information_schema.columns
-                        WHERE table_name = '${tableName}' AND column_name = 'doc_id'
-                    ) THEN
-                        ALTER TABLE "${tableName}" ADD COLUMN doc_id TEXT;
-                        CREATE INDEX IF NOT EXISTS doc_id_index ON "${tableName}" (doc_id);
-                    END IF;
-                END $$;`)
-
             await queryRunner.release()
         } catch (e: any) {
             // This error indicates that the table already exists
@ -263,8 +251,6 @@ class PostgresRecordManager implements RecordManagerInterface {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
throw e
|
throw e
|
||||||
} finally {
|
|
||||||
await dataSource.destroy()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
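The dropped Postgres migration relied on an idempotent DO $$ block so an existing table gains the doc_id column exactly once. A standalone sketch of that pattern, lifted from the removed lines (the structural queryRunner type stands in for TypeORM's QueryRunner):

    // Sketch: add a column and its index exactly once, guarded by information_schema.
    async function addDocIdColumnIfMissing(
        queryRunner: { manager: { query: (sql: string) => Promise<unknown> } },
        tableName: string
    ): Promise<void> {
        await queryRunner.manager.query(`
            DO $$
            BEGIN
                IF NOT EXISTS (
                    SELECT 1 FROM information_schema.columns
                    WHERE table_name = '${tableName}' AND column_name = 'doc_id'
                ) THEN
                    ALTER TABLE "${tableName}" ADD COLUMN doc_id TEXT;
                    CREATE INDEX IF NOT EXISTS doc_id_index ON "${tableName}" (doc_id);
                END IF;
            END $$;`)
    }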
@@ -298,7 +284,7 @@ class PostgresRecordManager implements RecordManagerInterface {
         return `(${placeholders.join(', ')})`
     }

-    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }

@@ -314,22 +300,17 @@ class PostgresRecordManager implements RecordManagerInterface {
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }

-        // Handle both new format (objects with uid and docId) and old format (strings)
-        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
-        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
-        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)
+        const groupIds = _groupIds ?? keys.map(() => null)

-        const groupIds = _groupIds ?? keyStrings.map(() => null)
-
-        if (groupIds.length !== keyStrings.length) {
-            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids ${groupIds.length})`)
+        if (groupIds.length !== keys.length) {
+            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids ${groupIds.length})`)
         }

-        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i], docIds[i]])
+        const recordsToUpsert = keys.map((key, i) => [key, this.namespace, updatedAt, groupIds[i]])

         const valuesPlaceholders = recordsToUpsert.map((_, j) => this.generatePlaceholderForRowAt(j, recordsToUpsert[0].length)).join(', ')

-        const query = `INSERT INTO "${tableName}" (key, namespace, updated_at, group_id, doc_id) VALUES ${valuesPlaceholders} ON CONFLICT (key, namespace) DO UPDATE SET updated_at = EXCLUDED.updated_at, doc_id = EXCLUDED.doc_id;`
+        const query = `INSERT INTO "${tableName}" (key, namespace, updated_at, group_id) VALUES ${valuesPlaceholders} ON CONFLICT (key, namespace) DO UPDATE SET updated_at = EXCLUDED.updated_at;`
         try {
             await queryRunner.manager.query(query, recordsToUpsert.flat())
             await queryRunner.release()
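The removed update() signature accepted either plain key strings or { uid, docId } objects. A sketch of the normalization the deleted lines performed before building the upsert rows:

    type KeyInput = string[] | Array<{ uid: string; docId: string }>

    // Sketch of the dual-format handling removed above: callers may pass
    // plain uid strings or { uid, docId } objects.
    function normalizeKeys(keys: KeyInput): { uids: string[]; docIds: (string | null)[] } {
        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
        const uids = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
        const docIds = isNewFormat
            ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId)
            : (keys as string[]).map(() => null)
        return { uids, docIds }
    }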
@@ -368,8 +349,8 @@ class PostgresRecordManager implements RecordManagerInterface {
         }
     }

-    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
-        const { before, after, limit, groupIds, docId } = options ?? {}
+    async listKeys(options?: ListKeyOptions): Promise<string[]> {
+        const { before, after, limit, groupIds } = options ?? {}
         const tableName = this.sanitizeTableName(this.tableName)

         let query = `SELECT key FROM "${tableName}" WHERE namespace = $1`

@@ -400,12 +381,6 @@ class PostgresRecordManager implements RecordManagerInterface {
             index += 1
         }

-        if (docId) {
-            values.push(docId)
-            query += ` AND doc_id = $${index}`
-            index += 1
-        }
-
         query += ';'

         const dataSource = await this.getDataSource()
@@ -51,6 +51,7 @@ class SQLiteRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
+                description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },

@@ -178,8 +179,8 @@ class SQLiteRecordManager implements RecordManagerInterface {
     }

     async createSchema(): Promise<void> {
-        const dataSource = await this.getDataSource()
         try {
+            const dataSource = await this.getDataSource()
             const queryRunner = dataSource.createQueryRunner()
             const tableName = this.sanitizeTableName(this.tableName)

@@ -197,15 +198,6 @@ CREATE INDEX IF NOT EXISTS key_index ON "${tableName}" (key);
 CREATE INDEX IF NOT EXISTS namespace_index ON "${tableName}" (namespace);
 CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)

-            // Add doc_id column if it doesn't exist (migration for existing tables)
-            const checkColumn = await queryRunner.manager.query(
-                `SELECT COUNT(*) as count FROM pragma_table_info('${tableName}') WHERE name='doc_id';`
-            )
-            if (checkColumn[0].count === 0) {
-                await queryRunner.manager.query(`ALTER TABLE "${tableName}" ADD COLUMN doc_id TEXT;`)
-                await queryRunner.manager.query(`CREATE INDEX IF NOT EXISTS doc_id_index ON "${tableName}" (doc_id);`)
-            }
-
             await queryRunner.release()
         } catch (e: any) {
             // This error indicates that the table already exists

@@ -216,8 +208,6 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
                 return
             }
             throw e
-        } finally {
-            await dataSource.destroy()
         }
     }
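On SQLite, the equivalent removed migration probed pragma_table_info before altering the table. A minimal sketch of that column-existence check (the query callback stands in for TypeORM's manager.query):

    // Sketch: SQLite column-existence probe via pragma_table_info.
    async function hasColumn(
        query: (sql: string) => Promise<Array<{ count: number }>>,
        tableName: string,
        columnName: string
    ): Promise<boolean> {
        const rows = await query(`SELECT COUNT(*) as count FROM pragma_table_info('${tableName}') WHERE name='${columnName}';`)
        return rows[0].count > 0
    }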
@@ -236,7 +226,7 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
         }
     }

-    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }

@@ -251,23 +241,23 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }

-        // Handle both new format (objects with uid and docId) and old format (strings)
-        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
-        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
-        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)
+        const groupIds = _groupIds ?? keys.map(() => null)

-        const groupIds = _groupIds ?? keyStrings.map(() => null)
-
-        if (groupIds.length !== keyStrings.length) {
-            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids (${groupIds.length})`)
+        if (groupIds.length !== keys.length) {
+            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`)
         }

-        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i] ?? null, docIds[i] ?? null])
+        const recordsToUpsert = keys.map((key, i) => [
+            key,
+            this.namespace,
+            updatedAt,
+            groupIds[i] ?? null // Ensure groupIds[i] is null if undefined
+        ])

         const query = `
-            INSERT INTO "${tableName}" (key, namespace, updated_at, group_id, doc_id)
-            VALUES (?, ?, ?, ?, ?)
-            ON CONFLICT (key, namespace) DO UPDATE SET updated_at = excluded.updated_at, doc_id = excluded.doc_id`
+            INSERT INTO "${tableName}" (key, namespace, updated_at, group_id)
+            VALUES (?, ?, ?, ?)
+            ON CONFLICT (key, namespace) DO UPDATE SET updated_at = excluded.updated_at`

         try {
             // To handle multiple files upsert

@@ -322,8 +312,8 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
         }
     }

-    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
-        const { before, after, limit, groupIds, docId } = options ?? {}
+    async listKeys(options?: ListKeyOptions): Promise<string[]> {
+        const { before, after, limit, groupIds } = options ?? {}
         const tableName = this.sanitizeTableName(this.tableName)

         let query = `SELECT key FROM "${tableName}" WHERE namespace = ?`

@@ -352,11 +342,6 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
             values.push(...groupIds.filter((gid): gid is string => gid !== null))
         }

-        if (docId) {
-            query += ` AND doc_id = ?`
-            values.push(docId)
-        }
-
         query += ';'

         const dataSource = await this.getDataSource()
@@ -238,7 +238,7 @@ export function filterConversationHistory(
 export const restructureMessages = (llm: BaseChatModel, state: ISeqAgentsState) => {
     const messages: BaseMessage[] = []
     for (const message of state.messages as unknown as BaseMessage[]) {
-        // Sometimes Anthropic can return a message with content types of array, ignore that EXCEPT when tool calls are present
+        // Sometimes Anthropic can return a message with content types of array, ignore that EXECEPT when tool calls are present
         if ((message as any).tool_calls?.length && message.content !== '') {
             message.content = JSON.stringify(message.content)
         }
@@ -4,13 +4,7 @@ import { RunnableConfig } from '@langchain/core/runnables'
 import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
 import { StructuredTool } from '@langchain/core/tools'
 import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
-import {
-    getCredentialData,
-    getCredentialParam,
-    executeJavaScriptCode,
-    createCodeExecutionSandbox,
-    parseWithTypeConversion
-} from '../../../src/utils'
+import { getCredentialData, getCredentialParam, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils'
 import { isValidUUID, isValidURL } from '../../../src/validator'
 import { v4 as uuidv4 } from 'uuid'

@@ -279,7 +273,7 @@ class AgentflowTool extends StructuredTool {
         }
         let parsed
         try {
-            parsed = await parseWithTypeConversion(this.schema, arg)
+            parsed = await this.schema.parseAsync(arg)
         } catch (e) {
             throw new Error(`Received tool input did not match expected schema: ${JSON.stringify(arg)}`)
         }
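Several hunks in this compare swap parseWithTypeConversion for zod's plain parseAsync. The helper's implementation is not part of this diff; purely as a hypothetical illustration, a coerce-then-parse wrapper could look like the sketch below (all names and behavior here are assumptions, not the actual src/utils code):

    import { z } from 'zod'

    // Hypothetical sketch only: retry a zod parse after coercing common
    // string-encoded primitives ("42" -> 42, "true" -> true). The real
    // parseWithTypeConversion in src/utils is not shown in this diff.
    async function parseWithCoercion<T extends z.ZodTypeAny>(schema: T, arg: unknown): Promise<z.infer<T>> {
        try {
            return await schema.parseAsync(arg)
        } catch (err) {
            if (typeof arg === 'object' && arg !== null) {
                const coerced = Object.fromEntries(
                    Object.entries(arg as Record<string, unknown>).map(([key, value]) => {
                        if (value === 'true') return [key, true]
                        if (value === 'false') return [key, false]
                        if (typeof value === 'string' && value.trim() !== '' && !Number.isNaN(Number(value))) return [key, Number(value)]
                        return [key, value]
                    })
                )
                return await schema.parseAsync(coerced)
            }
            throw err
        }
    }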
@@ -4,13 +4,7 @@ import { RunnableConfig } from '@langchain/core/runnables'
 import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
 import { StructuredTool } from '@langchain/core/tools'
 import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
-import {
-    getCredentialData,
-    getCredentialParam,
-    executeJavaScriptCode,
-    createCodeExecutionSandbox,
-    parseWithTypeConversion
-} from '../../../src/utils'
+import { getCredentialData, getCredentialParam, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils'
 import { isValidUUID, isValidURL } from '../../../src/validator'
 import { v4 as uuidv4 } from 'uuid'

@@ -287,7 +281,7 @@ class ChatflowTool extends StructuredTool {
         }
         let parsed
         try {
-            parsed = await parseWithTypeConversion(this.schema, arg)
+            parsed = await this.schema.parseAsync(arg)
         } catch (e) {
             throw new Error(`Received tool input did not match expected schema: ${JSON.stringify(arg)}`)
         }

@@ -1,5 +1,5 @@
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, parseWithTypeConversion } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { StructuredTool, ToolInputParsingException, ToolParams } from '@langchain/core/tools'
 import { Sandbox } from '@e2b/code-interpreter'
 import { z } from 'zod'

@@ -159,7 +159,7 @@ export class E2BTool extends StructuredTool {
         }
         let parsed
         try {
-            parsed = await parseWithTypeConversion(this.schema, arg)
+            parsed = await this.schema.parseAsync(arg)
         } catch (e) {
             throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg))
         }

@@ -2,7 +2,7 @@ import { z } from 'zod'
 import { RunnableConfig } from '@langchain/core/runnables'
 import { StructuredTool, ToolParams } from '@langchain/core/tools'
 import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
-import { executeJavaScriptCode, createCodeExecutionSandbox, parseWithTypeConversion } from '../../../src/utils'
+import { executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils'
 import { ICommonObject } from '../../../src/Interface'

 class ToolInputParsingException extends Error {

@@ -68,7 +68,7 @@ export class DynamicStructuredTool<
         }
         let parsed
         try {
-            parsed = await parseWithTypeConversion(this.schema, arg)
+            parsed = await this.schema.parseAsync(arg)
         } catch (e) {
             throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg))
         }
@@ -272,22 +272,6 @@ class GoogleCalendar_Tools implements INode {
                 additionalParams: true,
                 optional: true
             },
-            {
-                label: 'Send Updates to',
-                name: 'sendUpdates',
-                type: 'options',
-                description: 'Send Updates to attendees',
-                options: [
-                    { label: 'All', name: 'all' },
-                    { label: 'External Only', name: 'externalOnly' },
-                    { label: 'None', name: 'none' }
-                ],
-                show: {
-                    eventActions: ['createEvent', 'updateEvent']
-                },
-                additionalParams: true,
-                optional: true
-            },
             {
                 label: 'Recurrence Rules',
                 name: 'recurrence',

@@ -576,6 +560,7 @@ class GoogleCalendar_Tools implements INode {
         }

         const defaultParams = this.transformNodeInputsToToolArgs(nodeData)

         const tools = createGoogleCalendarTools({
             accessToken,
             actions,

@@ -602,7 +587,6 @@ class GoogleCalendar_Tools implements INode {
         if (nodeData.inputs?.startDate) defaultParams.startDate = nodeData.inputs.startDate
         if (nodeData.inputs?.endDate) defaultParams.endDate = nodeData.inputs.endDate
         if (nodeData.inputs?.attendees) defaultParams.attendees = nodeData.inputs.attendees
-        if (nodeData.inputs?.sendUpdates) defaultParams.sendUpdates = nodeData.inputs.sendUpdates
         if (nodeData.inputs?.recurrence) defaultParams.recurrence = nodeData.inputs.recurrence
         if (nodeData.inputs?.reminderMinutes) defaultParams.reminderMinutes = nodeData.inputs.reminderMinutes
         if (nodeData.inputs?.visibility) defaultParams.visibility = nodeData.inputs.visibility
@@ -48,7 +48,6 @@ const CreateEventSchema = z.object({
     endDate: z.string().optional().describe('End date for all-day events (YYYY-MM-DD)'),
     timeZone: z.string().optional().describe('Time zone (e.g., America/New_York)'),
     attendees: z.string().optional().describe('Comma-separated list of attendee emails'),
-    sendUpdates: z.enum(['all', 'externalOnly', 'none']).optional().default('all').describe('Whether to send notifications to attendees'),
     recurrence: z.string().optional().describe('Recurrence rules (RRULE format)'),
     reminderMinutes: z.number().optional().describe('Minutes before event to send reminder'),
     visibility: z.enum(['default', 'public', 'private', 'confidential']).optional().describe('Event visibility')

@@ -71,7 +70,6 @@ const UpdateEventSchema = z.object({
     endDate: z.string().optional().describe('Updated end date for all-day events (YYYY-MM-DD)'),
     timeZone: z.string().optional().describe('Updated time zone'),
     attendees: z.string().optional().describe('Updated comma-separated list of attendee emails'),
-    sendUpdates: z.enum(['all', 'externalOnly', 'none']).optional().default('all').describe('Whether to send notifications to attendees'),
     recurrence: z.string().optional().describe('Updated recurrence rules'),
     reminderMinutes: z.number().optional().describe('Updated reminder minutes'),
     visibility: z.enum(['default', 'public', 'private', 'confidential']).optional().describe('Updated event visibility')

@@ -288,11 +286,8 @@ class CreateEventTool extends BaseGoogleCalendarTool {
             }

             if (params.visibility) eventData.visibility = params.visibility
-            const queryParams = new URLSearchParams()
-            if (params.sendUpdates) queryParams.append('sendUpdates', params.sendUpdates)

-            const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events?${queryParams.toString()}`
+            const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events`
             const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', body: eventData, params })
             return response
         } catch (error) {

@@ -400,12 +395,8 @@ class UpdateEventTool extends BaseGoogleCalendarTool {
             }

             if (params.visibility) updateData.visibility = params.visibility
-            const queryParams = new URLSearchParams()
-            if (params.sendUpdates) queryParams.append('sendUpdates', params.sendUpdates)

-            const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events/${encodeURIComponent(
-                params.eventId
-            )}?${queryParams.toString()}`
+            const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events/${encodeURIComponent(params.eventId)}`
             const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'PUT', body: updateData, params })
             return response
         } catch (error) {
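The removed sendUpdates support rode on the endpoint's query string rather than the request body. A sketch of that wiring, taken directly from the deleted lines:

    // Sketch: sendUpdates travels as a query parameter on the Calendar API endpoint.
    function buildCreateEventEndpoint(calendarId: string, sendUpdates?: 'all' | 'externalOnly' | 'none'): string {
        const queryParams = new URLSearchParams()
        if (sendUpdates) queryParams.append('sendUpdates', sendUpdates)
        return `calendars/${encodeURIComponent(calendarId)}/events?${queryParams.toString()}`
    }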
@@ -136,17 +136,17 @@ class Custom_MCP implements INode {
         }

         let sandbox: ICommonObject = {}
-        const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId

         if (mcpServerConfig.includes('$vars')) {
             const appDataSource = options.appDataSource as DataSource
             const databaseEntities = options.databaseEntities as IDatabaseEntity
-            // If options.workspaceId is not set, create a new options object with the workspaceId for getVars.
-            const optionsWithWorkspaceId = options.workspaceId ? options : { ...options, workspaceId }
-            const variables = await getVars(appDataSource, databaseEntities, nodeData, optionsWithWorkspaceId)
+            const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
             sandbox['$vars'] = prepareSandboxVars(variables)
         }

+        const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId

         let canonicalConfig
         try {
             canonicalConfig = JSON.parse(mcpServerConfig)
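The removed fallback threaded a workspaceId derived from searchOptions into the options handed to getVars. A sketch of that derivation, condensed from the deleted lines:

    // Sketch: prefer an explicit options.workspaceId, otherwise derive one
    // from searchOptions before variable resolution.
    function withWorkspaceId(options: Record<string, any>): Record<string, any> {
        const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId
        return options.workspaceId ? options : { ...options, workspaceId }
    }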
@@ -114,7 +114,7 @@ export class MCPToolkit extends BaseToolkit {
         const res = await Promise.allSettled(toolsPromises)
         const errors = res.filter((r) => r.status === 'rejected')
         if (errors.length !== 0) {
-            console.error('MCP Tools failed to be resolved', errors)
+            console.error('MCP Tools falied to be resolved', errors)
         }
         const successes = res.filter((r) => r.status === 'fulfilled').map((r) => r.value)
         return successes
@@ -5,7 +5,6 @@ import $RefParser from '@apidevtools/json-schema-ref-parser'
 import { z, ZodSchema, ZodTypeAny } from 'zod'
 import { defaultCode, DynamicStructuredTool, howToUseCode } from './core'
 import { DataSource } from 'typeorm'
-import fetch from 'node-fetch'

 class OpenAPIToolkit_Tools implements INode {
     label: string

@@ -22,64 +21,17 @@ class OpenAPIToolkit_Tools implements INode {
     constructor() {
         this.label = 'OpenAPI Toolkit'
         this.name = 'openAPIToolkit'
-        this.version = 2.1
+        this.version = 2.0
         this.type = 'OpenAPIToolkit'
         this.icon = 'openapi.svg'
         this.category = 'Tools'
         this.description = 'Load OpenAPI specification, and converts each API endpoint to a tool'
         this.inputs = [
             {
-                label: 'Input Type',
-                name: 'inputType',
+                label: 'YAML File',
+                name: 'yamlFile',
-                type: 'options',
-                options: [
-                    {
-                        label: 'Upload File',
-                        name: 'file'
-                    },
-                    {
-                        label: 'Provide Link',
-                        name: 'link'
-                    }
-                ],
-                default: 'file',
-                description: 'Choose how to provide the OpenAPI specification'
-            },
-            {
-                label: 'OpenAPI File',
-                name: 'openApiFile',
                 type: 'file',
-                fileType: '.yaml,.json',
-                description: 'Upload your OpenAPI specification file (YAML or JSON)',
-                show: {
-                    inputType: 'file'
-                }
-            },
-            {
-                label: 'OpenAPI Link',
-                name: 'openApiLink',
-                type: 'string',
-                placeholder: 'https://api.example.com/openapi.yaml or https://api.example.com/openapi.json',
-                description: 'Provide a link to your OpenAPI specification (YAML or JSON)',
-                show: {
-                    inputType: 'link'
-                }
-            },
-            {
-                label: 'Server',
-                name: 'selectedServer',
-                type: 'asyncOptions',
-                loadMethod: 'listServers',
-                description: 'Select which server to use for API calls',
-                refresh: true
-            },
-            {
-                label: 'Available Endpoints',
-                name: 'selectedEndpoints',
-                type: 'asyncMultiOptions',
-                loadMethod: 'listEndpoints',
-                description: 'Select which endpoints to expose as tools',
-                refresh: true
+                fileType: '.yaml'
             },
             {
                 label: 'Return Direct',

@@ -94,7 +46,8 @@ class OpenAPIToolkit_Tools implements INode {
                 type: 'json',
                 description: 'Request headers to be sent with the API request. For example, {"Authorization": "Bearer token"}',
                 additionalParams: true,
-                optional: true
+                optional: true,
+                acceptVariable: true
             },
             {
                 label: 'Remove null parameters',

@@ -123,237 +76,49 @@ class OpenAPIToolkit_Tools implements INode {

     async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
         const toolReturnDirect = nodeData.inputs?.returnDirect as boolean
-        const inputType = nodeData.inputs?.inputType as string
-        const openApiFile = nodeData.inputs?.openApiFile as string
-        const openApiLink = nodeData.inputs?.openApiLink as string
-        const selectedServer = nodeData.inputs?.selectedServer as string
+        const yamlFileBase64 = nodeData.inputs?.yamlFile as string
         const customCode = nodeData.inputs?.customCode as string
         const _headers = nodeData.inputs?.headers as string
         const removeNulls = nodeData.inputs?.removeNulls as boolean

         const headers = typeof _headers === 'object' ? _headers : _headers ? JSON.parse(_headers) : {}

-        const specData = await this.loadOpenApiSpec(
-            {
-                inputType,
-                openApiFile,
-                openApiLink
-            },
-            options
-        )
-        if (!specData) throw new Error('Failed to load OpenAPI spec')
-
-        const _data: any = await $RefParser.dereference(specData)
-
-        // Use selected server or fallback to first server
-        let baseUrl: string
-        if (selectedServer && selectedServer !== 'error') {
-            baseUrl = selectedServer
+        let data
+        if (yamlFileBase64.startsWith('FILE-STORAGE::')) {
+            const file = yamlFileBase64.replace('FILE-STORAGE::', '')
+            const orgId = options.orgId
+            const chatflowid = options.chatflowid
+            const fileData = await getFileFromStorage(file, orgId, chatflowid)
+            const utf8String = fileData.toString('utf-8')
+            data = load(utf8String)
         } else {
-            baseUrl = _data.servers?.[0]?.url
+            const splitDataURI = yamlFileBase64.split(',')
+            splitDataURI.pop()
+            const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
+            const utf8String = bf.toString('utf-8')
+            data = load(utf8String)
+        }
+        if (!data) {
+            throw new Error('Failed to load OpenAPI spec')
         }

-        if (!baseUrl) throw new Error('OpenAPI spec does not contain a server URL')
+        const _data: any = await $RefParser.dereference(data)
+
+        const baseUrl = _data.servers[0]?.url
+        if (!baseUrl) {
+            throw new Error('OpenAPI spec does not contain a server URL')
+        }

         const appDataSource = options.appDataSource as DataSource
         const databaseEntities = options.databaseEntities as IDatabaseEntity
         const variables = await getVars(appDataSource, databaseEntities, nodeData, options)

         const flow = { chatflowId: options.chatflowid }

-        let tools = getTools(_data.paths, baseUrl, headers, variables, flow, toolReturnDirect, customCode, removeNulls)
-
-        // Filter by selected endpoints if provided
-        const _selected = nodeData.inputs?.selectedEndpoints
-        let selected: string[] = []
-        if (_selected) {
-            try {
-                selected = typeof _selected === 'string' ? JSON.parse(_selected) : _selected
-            } catch (e) {
-                selected = []
-            }
-        }
-        if (selected.length) {
-            tools = tools.filter((t: any) => selected.includes(t.name))
-        }
+        const tools = getTools(_data.paths, baseUrl, headers, variables, flow, toolReturnDirect, customCode, removeNulls)

         return tools
     }

-    //@ts-ignore
-    loadMethods = {
-        listServers: async (nodeData: INodeData, options: ICommonObject) => {
-            try {
-                const inputType = nodeData.inputs?.inputType as string
-                const openApiFile = nodeData.inputs?.openApiFile as string
-                const openApiLink = nodeData.inputs?.openApiLink as string
-                const specData: any = await this.loadOpenApiSpec(
-                    {
-                        inputType,
-                        openApiFile,
-                        openApiLink
-                    },
-                    options
-                )
-                if (!specData) return []
-                const _data: any = await $RefParser.dereference(specData)
-                const items: { label: string; name: string; description?: string }[] = []
-                const servers = _data.servers || []
-
-                if (servers.length === 0) {
-                    return [
-                        {
-                            label: 'No Servers Found',
-                            name: 'error',
-                            description: 'No servers defined in the OpenAPI specification'
-                        }
-                    ]
-                }
-
-                for (let i = 0; i < servers.length; i++) {
-                    const server = servers[i]
-                    const serverUrl = server.url || `Server ${i + 1}`
-                    const serverDesc = server.description || serverUrl
-                    items.push({
-                        label: serverUrl,
-                        name: serverUrl,
-                        description: serverDesc
-                    })
-                }
-
-                return items
-            } catch (e) {
-                return [
-                    {
-                        label: 'No Servers Found',
-                        name: 'error',
-                        description: 'No available servers, check the link/file and refresh'
-                    }
-                ]
-            }
-        },
-        listEndpoints: async (nodeData: INodeData, options: ICommonObject) => {
-            try {
-                const inputType = nodeData.inputs?.inputType as string
-                const openApiFile = nodeData.inputs?.openApiFile as string
-                const openApiLink = nodeData.inputs?.openApiLink as string
-                const specData: any = await this.loadOpenApiSpec(
-                    {
-                        inputType,
-                        openApiFile,
-                        openApiLink
-                    },
-                    options
-                )
-                if (!specData) return []
-                const _data: any = await $RefParser.dereference(specData)
-                const items: { label: string; name: string; description?: string }[] = []
-                const paths = _data.paths || {}
-                for (const path in paths) {
-                    const methods = paths[path]
-                    for (const method in methods) {
-                        if (['get', 'post', 'put', 'delete', 'patch'].includes(method)) {
-                            const spec = methods[method]
-                            const opId = spec.operationId || `${method.toUpperCase()} ${path}`
-                            const desc = spec.description || spec.summary || opId
-                            items.push({ label: opId, name: opId, description: desc })
-                        }
-                    }
-                }
-                items.sort((a, b) => a.label.localeCompare(b.label))
-                return items
-            } catch (e) {
-                return [
-                    {
-                        label: 'No Endpoints Found',
-                        name: 'error',
-                        description: 'No available endpoints, check the link/file and refresh'
-                    }
-                ]
-            }
-        }
-    }
-
-    private async loadOpenApiSpec(
-        args: {
-            inputType?: string
-            openApiFile?: string
-            openApiLink?: string
-        },
-        options: ICommonObject
-    ): Promise<any | null> {
-        const { inputType = 'file', openApiFile = '', openApiLink = '' } = args
-        try {
-            if (inputType === 'link' && openApiLink) {
-                const res = await fetch(openApiLink)
-                const text = await res.text()
-
-                // Auto-detect format from URL extension or content
-                const isJsonUrl = openApiLink.toLowerCase().includes('.json')
-                const isYamlUrl = openApiLink.toLowerCase().includes('.yaml') || openApiLink.toLowerCase().includes('.yml')
-
-                if (isJsonUrl) {
-                    return JSON.parse(text)
-                } else if (isYamlUrl) {
-                    return load(text)
-                } else {
-                    // Auto-detect format from content
-                    try {
-                        return JSON.parse(text)
-                    } catch (_) {
-                        return load(text)
-                    }
-                }
-            }
-
-            if (inputType === 'file' && openApiFile) {
-                let utf8String: string
-                let fileName = ''
-
-                if (openApiFile.startsWith('FILE-STORAGE::')) {
-                    const file = openApiFile.replace('FILE-STORAGE::', '')
-                    fileName = file
-                    const orgId = options.orgId
-                    const chatflowid = options.chatflowid
-                    const fileData = await getFileFromStorage(file, orgId, chatflowid)
-                    utf8String = fileData.toString('utf-8')
-                } else {
-                    // Extract filename from data URI if possible
-                    const splitDataURI = openApiFile.split(',')
-                    const mimeType = splitDataURI[0] || ''
-                    if (mimeType.includes('filename=')) {
-                        const filenameMatch = mimeType.match(/filename=([^;]+)/)
-                        if (filenameMatch) {
-                            fileName = filenameMatch[1]
-                        }
-                    }
-                    splitDataURI.pop()
-                    const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
-                    utf8String = bf.toString('utf-8')
-                }
-
-                // Auto-detect format from file extension or content
-                const isJsonFile = fileName.toLowerCase().endsWith('.json')
-                const isYamlFile = fileName.toLowerCase().endsWith('.yaml') || fileName.toLowerCase().endsWith('.yml')
-
-                if (isJsonFile) {
-                    return JSON.parse(utf8String)
-                } else if (isYamlFile) {
-                    return load(utf8String)
-                } else {
-                    // Auto-detect format from content
-                    try {
-                        return JSON.parse(utf8String)
-                    } catch (_) {
-                        return load(utf8String)
-                    }
-                }
-            }
-        } catch (e) {
-            console.error('Error loading OpenAPI spec:', e)
-            return null
-        }
-        return null
-    }
 }

 const jsonSchemaToZodSchema = (schema: any, requiredList: string[], keyName: string): ZodSchema<any> => {
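The deleted loadOpenApiSpec helper auto-detected JSON vs YAML by attempting a JSON parse first and falling back to a YAML load. A minimal sketch of that detection step (assuming the js-yaml load the file already uses):

    import { load } from 'js-yaml'

    // Sketch: treat the spec text as JSON when it parses, otherwise fall back to YAML.
    function parseSpecText(text: string): unknown {
        try {
            return JSON.parse(text)
        } catch (_) {
            return load(text)
        }
    }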
@@ -3,7 +3,7 @@ import { RequestInit } from 'node-fetch'
 import { RunnableConfig } from '@langchain/core/runnables'
 import { StructuredTool, ToolParams } from '@langchain/core/tools'
 import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager'
-import { executeJavaScriptCode, createCodeExecutionSandbox, parseWithTypeConversion } from '../../../src/utils'
+import { executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils'
 import { ICommonObject } from '../../../src/Interface'

 const removeNulls = (obj: Record<string, any>) => {

@@ -174,7 +174,7 @@ export class DynamicStructuredTool<
         }
         let parsed
         try {
-            parsed = await parseWithTypeConversion(this.schema, arg)
+            parsed = await this.schema.parseAsync(arg)
         } catch (e) {
             throw new ToolInputParsingException(`Received tool input did not match expected schema ${e}`, JSON.stringify(arg))
         }
@@ -0,0 +1,85 @@
+import { z } from 'zod'
+import { StructuredTool, ToolParams } from '@langchain/core/tools'
+import { Serializable } from '@langchain/core/load/serializable'
+import { NodeFileStore } from 'langchain/stores/file/node'
+import { INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses } from '../../../src/utils'
+
+abstract class BaseFileStore extends Serializable {
+    abstract readFile(path: string): Promise<string>
+    abstract writeFile(path: string, contents: string): Promise<void>
+}
+
+class ReadFile_Tools implements INode {
+    label: string
+    name: string
+    version: number
+    description: string
+    type: string
+    icon: string
+    category: string
+    baseClasses: string[]
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Read File'
+        this.name = 'readFile'
+        this.version = 1.0
+        this.type = 'ReadFile'
+        this.icon = 'readfile.svg'
+        this.category = 'Tools'
+        this.description = 'Read file from disk'
+        this.baseClasses = [this.type, 'Tool', ...getBaseClasses(ReadFileTool)]
+        this.inputs = [
+            {
+                label: 'Base Path',
+                name: 'basePath',
+                placeholder: `C:\\Users\\User\\Desktop`,
+                type: 'string',
+                optional: true
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData): Promise<any> {
+        const basePath = nodeData.inputs?.basePath as string
+        const store = basePath ? new NodeFileStore(basePath) : new NodeFileStore()
+        return new ReadFileTool({ store })
+    }
+}
+
+interface ReadFileParams extends ToolParams {
+    store: BaseFileStore
+}
+
+/**
+ * Class for reading files from the disk. Extends the StructuredTool
+ * class.
+ */
+export class ReadFileTool extends StructuredTool {
+    static lc_name() {
+        return 'ReadFileTool'
+    }
+
+    schema = z.object({
+        file_path: z.string().describe('name of file')
+    }) as any
+
+    name = 'read_file'
+
+    description = 'Read file from disk'
+
+    store: BaseFileStore
+
+    constructor({ store }: ReadFileParams) {
+        super(...arguments)
+
+        this.store = store
+    }
+
+    async _call({ file_path }: z.infer<typeof this.schema>) {
+        return await this.store.readFile(file_path)
+    }
+}
+
+module.exports = { nodeClass: ReadFile_Tools }

@@ -0,0 +1,4 @@
+<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M18 5H9C7.89543 5 7 5.89543 7 7V25C7 26.1046 7.89543 27 9 27H12M18 5L25 12M18 5V12H25M25 12V25C25 26.1046 24.1046 27 23 27H20" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M16 17V29M16 17L13 20.1361M16 17L19 20.1361" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
(New SVG icon, 455 B, referenced as 'readfile.svg' above)
@@ -3,7 +3,7 @@ import { CallbackManager, CallbackManagerForToolRun, Callbacks, parseCallbackCon
 import { BaseDynamicToolInput, DynamicTool, StructuredTool, ToolInputParsingException } from '@langchain/core/tools'
 import { BaseRetriever } from '@langchain/core/retrievers'
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, resolveFlowObjValue, parseWithTypeConversion } from '../../../src/utils'
+import { getBaseClasses, resolveFlowObjValue } from '../../../src/utils'
 import { SOURCE_DOCUMENTS_PREFIX } from '../../../src/agents'
 import { RunnableConfig } from '@langchain/core/runnables'
 import { VectorStoreRetriever } from '@langchain/core/vectorstores'

@@ -58,7 +58,7 @@ class DynamicStructuredTool<T extends z.ZodObject<any, any, any, any> = z.ZodObj
         }
         let parsed
         try {
-            parsed = await parseWithTypeConversion(this.schema, arg)
+            parsed = await this.schema.parseAsync(arg)
         } catch (e) {
             throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg))
         }
@@ -0,0 +1,87 @@
+import { z } from 'zod'
+import { StructuredTool, ToolParams } from '@langchain/core/tools'
+import { Serializable } from '@langchain/core/load/serializable'
+import { NodeFileStore } from 'langchain/stores/file/node'
+import { INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses } from '../../../src/utils'
+
+abstract class BaseFileStore extends Serializable {
+    abstract readFile(path: string): Promise<string>
+    abstract writeFile(path: string, contents: string): Promise<void>
+}
+
+class WriteFile_Tools implements INode {
+    label: string
+    name: string
+    version: number
+    description: string
+    type: string
+    icon: string
+    category: string
+    baseClasses: string[]
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Write File'
+        this.name = 'writeFile'
+        this.version = 1.0
+        this.type = 'WriteFile'
+        this.icon = 'writefile.svg'
+        this.category = 'Tools'
+        this.description = 'Write file to disk'
+        this.baseClasses = [this.type, 'Tool', ...getBaseClasses(WriteFileTool)]
+        this.inputs = [
+            {
+                label: 'Base Path',
+                name: 'basePath',
+                placeholder: `C:\\Users\\User\\Desktop`,
+                type: 'string',
+                optional: true
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData): Promise<any> {
+        const basePath = nodeData.inputs?.basePath as string
+        const store = basePath ? new NodeFileStore(basePath) : new NodeFileStore()
+        return new WriteFileTool({ store })
+    }
+}
+
+interface WriteFileParams extends ToolParams {
+    store: BaseFileStore
+}
+
+/**
+ * Class for writing data to files on the disk. Extends the StructuredTool
+ * class.
+ */
+export class WriteFileTool extends StructuredTool {
+    static lc_name() {
+        return 'WriteFileTool'
+    }
+
+    schema = z.object({
+        file_path: z.string().describe('name of file'),
+        text: z.string().describe('text to write to file')
+    }) as any
+
+    name = 'write_file'
+
+    description = 'Write file from disk'
+
+    store: BaseFileStore
+
+    constructor({ store, ...rest }: WriteFileParams) {
+        super(rest)
+
+        this.store = store
+    }
+
+    async _call({ file_path, text }: z.infer<typeof this.schema>) {
+        await this.store.writeFile(file_path, text)
+        return 'File written to successfully.'
+    }
+}
+
+module.exports = { nodeClass: WriteFile_Tools }

@@ -0,0 +1,4 @@
+<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M25 18V25C25 26.1046 24.1046 27 23 27H9C7.89543 27 7 26.1046 7 25V7C7 5.89543 7.89543 5 9 5H18L19 6" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M12 19.3284V22H14.6716C15.202 22 15.7107 21.7893 16.0858 21.4142L24.5858 12.9142C25.3668 12.1332 25.3668 10.8668 24.5858 10.0858L23.9142 9.41421C23.1332 8.63316 21.8668 8.63317 21.0858 9.41421L12.5858 17.9142C12.2107 18.2893 12 18.798 12 19.3284Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
(New SVG icon, 632 B, referenced as 'writefile.svg' above)
@@ -84,16 +84,11 @@ class CustomFunction_Utilities implements INode {

         const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
         const flow = {
-            input,
             chatflowId: options.chatflowid,
             sessionId: options.sessionId,
             chatId: options.chatId,
-            rawOutput: options.postProcessing?.rawOutput || '',
-            chatHistory: options.postProcessing?.chatHistory || [],
-            sourceDocuments: options.postProcessing?.sourceDocuments,
-            usedTools: options.postProcessing?.usedTools,
-            artifacts: options.postProcessing?.artifacts,
-            fileAnnotations: options.postProcessing?.fileAnnotations
+            rawOutput: options.rawOutput || '',
+            input
         }

         let inputVars: ICommonObject = {}
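The removed flow object surfaced post-processing outputs from options.postProcessing; the restored version reads rawOutput from options directly. A sketch of the richer shape being removed, condensed from the deleted lines:

    // Sketch of the removed flow shape; postProcessing fields default safely when absent.
    function buildFlow(input: string, options: Record<string, any>) {
        return {
            input,
            chatflowId: options.chatflowid,
            sessionId: options.sessionId,
            chatId: options.chatId,
            rawOutput: options.postProcessing?.rawOutput || '',
            chatHistory: options.postProcessing?.chatHistory || []
        }
    }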
@@ -3,7 +3,7 @@ import { Chroma } from '@langchain/community/vectorstores/chroma'
 import { Embeddings } from '@langchain/core/embeddings'
 import { Document } from '@langchain/core/documents'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { ChromaExtended } from './core'
 import { index } from '../../../src/indexing'

@@ -186,11 +186,7 @@ class Chroma_VectorStores implements INode {
             const vectorStoreName = collectionName
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const filterKeys: ICommonObject = {}
-            if (options.docId) {
-                filterKeys.docId = options.docId
-            }
-            const keys: string[] = await recordManager.listKeys(filterKeys)
+            const keys: string[] = await recordManager.listKeys({})

             const chromaStore = new ChromaExtended(embeddings, obj)

@@ -233,7 +229,7 @@ class Chroma_VectorStores implements INode {
         if (chromaTenant) obj.chromaTenant = chromaTenant
         if (chromaDatabase) obj.chromaDatabase = chromaDatabase
         if (chromaMetadataFilter) {
-            const metadatafilter = typeof chromaMetadataFilter === 'object' ? chromaMetadataFilter : parseJsonBody(chromaMetadataFilter)
+            const metadatafilter = typeof chromaMetadataFilter === 'object' ? chromaMetadataFilter : JSON.parse(chromaMetadataFilter)
             obj.filter = metadatafilter
         }
@@ -4,7 +4,7 @@ import { Document } from '@langchain/core/documents'
 import { CouchbaseVectorStore, CouchbaseVectorStoreArgs } from '@langchain/community/vectorstores/couchbase'
 import { Cluster } from 'couchbase'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { resolveVectorStoreOrRetriever } from '../VectorStoreUtils'

 class Couchbase_VectorStores implements INode {

@@ -215,8 +215,7 @@ class Couchbase_VectorStores implements INode {
         if (!embeddingKey || embeddingKey === '') couchbaseConfig.embeddingKey = 'embedding'

         if (couchbaseMetadataFilter) {
-            metadatafilter =
-                typeof couchbaseMetadataFilter === 'object' ? couchbaseMetadataFilter : parseJsonBody(couchbaseMetadataFilter)
+            metadatafilter = typeof couchbaseMetadataFilter === 'object' ? couchbaseMetadataFilter : JSON.parse(couchbaseMetadataFilter)
         }

         const vectorStore = await CouchbaseVectorStore.initialize(embeddings, couchbaseConfig)

@@ -198,11 +198,7 @@ class Elasticsearch_VectorStores implements INode {
             const vectorStoreName = indexName
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const filterKeys: ICommonObject = {}
-            if (options.docId) {
-                filterKeys.docId = options.docId
-            }
-            const keys: string[] = await recordManager.listKeys(filterKeys)
+            const keys: string[] = await recordManager.listKeys({})

             await vectorStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)

@@ -3,7 +3,7 @@ import { AmazonKendraRetriever } from '@langchain/aws'
 import { KendraClient, BatchPutDocumentCommand, BatchDeleteDocumentCommand } from '@aws-sdk/client-kendra'
 import { Document } from '@langchain/core/documents'
 import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { FLOWISE_CHATID, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { FLOWISE_CHATID, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { howToUseFileUpload } from '../VectorStoreUtils'
 import { MODEL_TYPE, getRegions } from '../../../src/modelLoader'

@@ -248,7 +248,7 @@ class Kendra_VectorStores implements INode {

         let filter = undefined
         if (attributeFilter) {
-            filter = typeof attributeFilter === 'object' ? attributeFilter : parseJsonBody(attributeFilter)
+            filter = typeof attributeFilter === 'object' ? attributeFilter : JSON.parse(attributeFilter)
         }

         // Add chat-specific filtering if file upload is enabled

@@ -79,7 +79,7 @@ class MeilisearchRetriever_node implements INode {
             label: 'Semantic Ratio',
             name: 'semanticRatio',
             type: 'number',
-            description: 'percentage of semantic reasoning in meilisearch hybrid search, default is 0.75',
+            description: 'percentage of sematic reasoning in meilisearch hybrid search, default is 0.75',
             additionalParams: true,
             optional: true
         },

@@ -162,7 +162,7 @@ class MeilisearchRetriever_node implements INode {
                 }
             } catch (error) {
                 console.error(error)
-                console.warn('Error occurred when deleting your index, if it did not exist, we will create one for you... ')
+                console.warn('Error occured when deleting your index, if it did not exist, we will create one for you... ')
             }
         }

@@ -2,7 +2,7 @@ import { flatten } from 'lodash'
 import { Embeddings } from '@langchain/core/embeddings'
 import { Document } from '@langchain/core/documents'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils'
 import { MongoDBAtlasVectorSearch } from './core'

@@ -187,7 +187,7 @@ class MongoDBAtlas_VectorStores implements INode {
         })

         if (mongoMetadataFilter) {
-            const metadataFilter = typeof mongoMetadataFilter === 'object' ? mongoMetadataFilter : parseJsonBody(mongoMetadataFilter)
+            const metadataFilter = typeof mongoMetadataFilter === 'object' ? mongoMetadataFilter : JSON.parse(mongoMetadataFilter)

             for (const key in metadataFilter) {
                 mongoDbFilter.preFilter = {

@@ -5,7 +5,7 @@ import { Embeddings } from '@langchain/core/embeddings'
 import { Document } from '@langchain/core/documents'
 import { VectorStore } from '@langchain/core/vectorstores'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { addMMRInputParams, howToUseFileUpload, resolveVectorStoreOrRetriever } from '../VectorStoreUtils'
 import { index } from '../../../src/indexing'

@@ -212,11 +212,7 @@ class Pinecone_VectorStores implements INode {
             const vectorStoreName = pineconeNamespace
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const filterKeys: ICommonObject = {}
-            if (options.docId) {
-                filterKeys.docId = options.docId
-            }
-            const keys: string[] = await recordManager.listKeys(filterKeys)
+            const keys: string[] = await recordManager.listKeys({})

             await pineconeStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)

@@ -252,8 +248,7 @@ class Pinecone_VectorStores implements INode {

         if (pineconeNamespace) obj.namespace = pineconeNamespace
         if (pineconeMetadataFilter) {
-            const metadatafilter =
-                typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : parseJsonBody(pineconeMetadataFilter)
+            const metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : JSON.parse(pineconeMetadataFilter)
             obj.filter = metadatafilter
         }
         if (isFileUploadEnabled && options.chatId) {

@@ -16,7 +16,7 @@ import { FetchResponse, Index, Pinecone, ScoredPineconeRecord } from '@pinecone-database/pinecone'
 import { flatten } from 'lodash'
 import { Document as LCDocument } from 'langchain/document'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { flattenObject, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { flattenObject, getCredentialData, getCredentialParam } from '../../../src/utils'

 class PineconeLlamaIndex_VectorStores implements INode {
     label: string

@@ -176,7 +176,7 @@ class PineconeLlamaIndex_VectorStores implements INode {

         let metadatafilter = {}
         if (pineconeMetadataFilter) {
-            metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : parseJsonBody(pineconeMetadataFilter)
+            metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : JSON.parse(pineconeMetadataFilter)
             obj.queryFilter = metadatafilter
         }

@@ -1,7 +1,7 @@
 import { flatten } from 'lodash'
 import { Document } from '@langchain/core/documents'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { FLOWISE_CHATID, getBaseClasses, parseJsonBody } from '../../../src/utils'
+import { FLOWISE_CHATID, getBaseClasses } from '../../../src/utils'
 import { index } from '../../../src/indexing'
 import { howToUseFileUpload } from '../VectorStoreUtils'
 import { VectorStore } from '@langchain/core/vectorstores'

@@ -49,7 +49,7 @@ class Postgres_VectorStores implements INode {
     constructor() {
         this.label = 'Postgres'
         this.name = 'postgres'
-        this.version = 7.1
+        this.version = 7.0
         this.type = 'Postgres'
         this.icon = 'postgres.svg'
         this.category = 'Vector Stores'

@@ -173,15 +173,6 @@ class Postgres_VectorStores implements INode {
             additionalParams: true,
             optional: true
         },
-        {
-            label: 'Upsert Batch Size',
-            name: 'batchSize',
-            type: 'number',
-            step: 1,
-            description: 'Upsert in batches of size N',
-            additionalParams: true,
-            optional: true
-        },
         {
             label: 'Additional Configuration',
             name: 'additionalConfig',

@@ -241,7 +232,6 @@ class Postgres_VectorStores implements INode {
         const docs = nodeData.inputs?.document as Document[]
         const recordManager = nodeData.inputs?.recordManager
         const isFileUploadEnabled = nodeData.inputs?.fileUpload as boolean
-        const _batchSize = nodeData.inputs?.batchSize
         const vectorStoreDriver: VectorStoreDriver = Postgres_VectorStores.getDriverFromConfig(nodeData, options)

         const flattenDocs = docs && docs.length ? flatten(docs) : []

@@ -275,15 +265,7 @@ class Postgres_VectorStores implements INode {

             return res
         } else {
-            if (_batchSize) {
-                const batchSize = parseInt(_batchSize, 10)
-                for (let i = 0; i < finalDocs.length; i += batchSize) {
-                    const batch = finalDocs.slice(i, i + batchSize)
-                    await vectorStoreDriver.fromDocuments(batch)
-                }
-            } else {
-                await vectorStoreDriver.fromDocuments(finalDocs)
-            }
+            await vectorStoreDriver.fromDocuments(finalDocs)

             return { numAdded: finalDocs.length, addedDocs: finalDocs }
         }

@@ -303,11 +285,7 @@ class Postgres_VectorStores implements INode {
             const vectorStoreName = tableName
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const filterKeys: ICommonObject = {}
-            if (options.docId) {
-                filterKeys.docId = options.docId
-            }
-            const keys: string[] = await recordManager.listKeys(filterKeys)
+            const keys: string[] = await recordManager.listKeys({})

             await vectorStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)

@@ -330,7 +308,7 @@ class Postgres_VectorStores implements INode {

         let pgMetadataFilter: any
         if (_pgMetadataFilter) {
-            pgMetadataFilter = typeof _pgMetadataFilter === 'object' ? _pgMetadataFilter : parseJsonBody(_pgMetadataFilter)
+            pgMetadataFilter = typeof _pgMetadataFilter === 'object' ? _pgMetadataFilter : JSON.parse(_pgMetadataFilter)
         }
         if (isFileUploadEnabled && options.chatId) {
             pgMetadataFilter = {

@@ -5,11 +5,6 @@ import { TypeORMVectorStore, TypeORMVectorStoreArgs, TypeORMVectorStoreDocument
 import { VectorStore } from '@langchain/core/vectorstores'
 import { Document } from '@langchain/core/documents'
 import { Pool } from 'pg'
-import { v4 as uuid } from 'uuid'
-
-type TypeORMAddDocumentOptions = {
-    ids?: string[]
-}

 export class TypeORMDriver extends VectorStoreDriver {
     protected _postgresConnectionOptions: DataSourceOptions

@@ -100,45 +95,15 @@ export class TypeORMDriver extends VectorStoreDriver {
             try {
                 instance.appDataSource.getRepository(instance.documentEntity).delete(ids)
             } catch (e) {
-                console.error('Failed to delete', e)
+                console.error('Failed to delete')
             }
         }
     }

-        instance.addVectors = async (
-            vectors: number[][],
-            documents: Document[],
-            documentOptions?: TypeORMAddDocumentOptions
-        ): Promise<void> => {
-            const rows = vectors.map((embedding, idx) => {
-                const embeddingString = `[${embedding.join(',')}]`
-                const documentRow = {
-                    id: documentOptions?.ids?.length ? documentOptions.ids[idx] : uuid(),
-                    pageContent: documents[idx].pageContent,
-                    embedding: embeddingString,
-                    metadata: documents[idx].metadata
-                }
-                return documentRow
-            })
-
-            const documentRepository = instance.appDataSource.getRepository(instance.documentEntity)
-            const _batchSize = this.nodeData.inputs?.batchSize
-            const chunkSize = _batchSize ? parseInt(_batchSize, 10) : 500
-
-            for (let i = 0; i < rows.length; i += chunkSize) {
-                const chunk = rows.slice(i, i + chunkSize)
-                try {
-                    await documentRepository.save(chunk)
-                } catch (e) {
-                    console.error(e)
-                    throw new Error(`Error inserting: ${chunk[0].pageContent}`)
-                }
-            }
-        }
-
-        instance.addDocuments = async (documents: Document[], options?: { ids?: string[] }): Promise<void> => {
-            const texts = documents.map(({ pageContent }) => pageContent)
-            return (instance.addVectors as any)(await this.getEmbeddings().embedDocuments(texts), documents, options)
-        }
+        const baseAddVectorsFn = instance.addVectors.bind(instance)
+
+        instance.addVectors = async (vectors, documents) => {
+            return baseAddVectorsFn(vectors, this.sanitizeDocuments(documents))
+        }

         return instance

@@ -6,7 +6,7 @@ import { Document } from '@langchain/core/documents'
 import { QdrantVectorStore, QdrantLibArgs } from '@langchain/qdrant'
 import { Embeddings } from '@langchain/core/embeddings'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { index } from '../../../src/indexing'
 import { howToUseFileUpload } from '../VectorStoreUtils'

@@ -77,8 +77,7 @@ class Qdrant_VectorStores implements INode {
         {
             label: 'Qdrant Collection Name',
             name: 'qdrantCollection',
-            type: 'string',
-            acceptVariable: true
+            type: 'string'
         },
         {
             label: 'File Upload',

@@ -385,11 +384,7 @@ class Qdrant_VectorStores implements INode {
             const vectorStoreName = collectionName
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const filterKeys: ICommonObject = {}
-            if (options.docId) {
-                filterKeys.docId = options.docId
-            }
-            const keys: string[] = await recordManager.listKeys(filterKeys)
+            const keys: string[] = await recordManager.listKeys({})

             await vectorStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)

@@ -444,7 +439,7 @@ class Qdrant_VectorStores implements INode {
                 qdrantCollectionConfiguration =
                     typeof qdrantCollectionConfiguration === 'object'
                         ? qdrantCollectionConfiguration
-                        : parseJsonBody(qdrantCollectionConfiguration)
+                        : JSON.parse(qdrantCollectionConfiguration)
                 dbConfig.collectionConfig = {
                     ...qdrantCollectionConfiguration,
                     vectors: {

@@ -456,7 +451,7 @@ class Qdrant_VectorStores implements INode {
         }

         if (queryFilter) {
-            retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : parseJsonBody(queryFilter)
+            retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : JSON.parse(queryFilter)
         }
         if (isFileUploadEnabled && options.chatId) {
             retrieverConfig.filter = retrieverConfig.filter || {}

@@ -5,7 +5,7 @@ import { Document } from '@langchain/core/documents'
 import { Embeddings } from '@langchain/core/embeddings'
 import { SupabaseVectorStore, SupabaseLibArgs } from '@langchain/community/vectorstores/supabase'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils'
 import { index } from '../../../src/indexing'
 import { FilterParser } from './filterParser'

@@ -197,11 +197,7 @@ class Supabase_VectorStores implements INode {
             const vectorStoreName = tableName + '_' + queryName
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const filterKeys: ICommonObject = {}
-            if (options.docId) {
-                filterKeys.docId = options.docId
-            }
-            const keys: string[] = await recordManager.listKeys(filterKeys)
+            const keys: string[] = await recordManager.listKeys({})

             await supabaseStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)

@@ -234,8 +230,7 @@ class Supabase_VectorStores implements INode {
         }

         if (supabaseMetadataFilter) {
-            const metadatafilter =
-                typeof supabaseMetadataFilter === 'object' ? supabaseMetadataFilter : parseJsonBody(supabaseMetadataFilter)
+            const metadatafilter = typeof supabaseMetadataFilter === 'object' ? supabaseMetadataFilter : JSON.parse(supabaseMetadataFilter)
             obj.filter = metadatafilter
         }

@@ -187,11 +187,7 @@ class Upstash_VectorStores implements INode {
             const vectorStoreName = UPSTASH_VECTOR_REST_URL
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const filterKeys: ICommonObject = {}
-            if (options.docId) {
-                filterKeys.docId = options.docId
-            }
-            const keys: string[] = await recordManager.listKeys(filterKeys)
+            const keys: string[] = await recordManager.listKeys({})

             await upstashStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)

@@ -4,7 +4,7 @@ import { WeaviateLibArgs, WeaviateStore } from '@langchain/weaviate'
 import { Document } from '@langchain/core/documents'
 import { Embeddings } from '@langchain/core/embeddings'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, normalizeKeysRecursively, parseJsonBody } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam, normalizeKeysRecursively } from '../../../src/utils'
 import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils'
 import { index } from '../../../src/indexing'
 import { VectorStore } from '@langchain/core/vectorstores'

@@ -252,11 +252,7 @@ class Weaviate_VectorStores implements INode {
             const vectorStoreName = weaviateTextKey ? weaviateIndex + '_' + weaviateTextKey : weaviateIndex
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const filterKeys: ICommonObject = {}
-            if (options.docId) {
-                filterKeys.docId = options.docId
-            }
-            const keys: string[] = await recordManager.listKeys(filterKeys)
+            const keys: string[] = await recordManager.listKeys({})

             await weaviateStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)

@@ -298,7 +294,7 @@ class Weaviate_VectorStores implements INode {
         if (weaviateTextKey) obj.textKey = weaviateTextKey
         if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, ''))
         if (weaviateFilter) {
-            weaviateFilter = typeof weaviateFilter === 'object' ? weaviateFilter : parseJsonBody(weaviateFilter)
+            weaviateFilter = typeof weaviateFilter === 'object' ? weaviateFilter : JSON.parse(weaviateFilter)
         }

         const vectorStore = (await WeaviateStore.fromExistingIndex(embeddings, obj)) as unknown as VectorStore

@@ -4,7 +4,7 @@ import { ZepVectorStore, IZepConfig } from '@langchain/community/vectorstores/zep'
 import { Embeddings } from '@langchain/core/embeddings'
 import { Document } from '@langchain/core/documents'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils'

 class Zep_VectorStores implements INode {

@@ -159,7 +159,7 @@ class Zep_VectorStores implements INode {
         }
         if (apiKey) zepConfig.apiKey = apiKey
         if (zepMetadataFilter) {
-            const metadatafilter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : parseJsonBody(zepMetadataFilter)
+            const metadatafilter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : JSON.parse(zepMetadataFilter)
             zepConfig.filter = metadatafilter
         }

@@ -3,7 +3,7 @@ import { ZepClient } from '@getzep/zep-cloud'
 import { IZepConfig, ZepVectorStore } from '@getzep/zep-cloud/langchain'
 import { Document } from 'langchain/document'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
-import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils'
 import { FakeEmbeddings } from 'langchain/embeddings/fake'
 import { Embeddings } from '@langchain/core/embeddings'

@@ -129,7 +129,7 @@ class Zep_CloudVectorStores implements INode {
             collectionName: zepCollection
         }
         if (zepMetadataFilter) {
-            zepConfig.filter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : parseJsonBody(zepMetadataFilter)
+            zepConfig.filter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : JSON.parse(zepMetadataFilter)
         }
         zepConfig.client = new ZepClient({
             apiKey: apiKey

@@ -1,6 +1,6 @@
 {
     "name": "flowise-components",
-    "version": "3.0.11",
+    "version": "3.0.7",
     "description": "Flowiseai Components",
     "main": "dist/src/index",
     "types": "dist/src/index.d.ts",

@@ -33,7 +33,6 @@
         "@dqbd/tiktoken": "^1.0.21",
         "@e2b/code-interpreter": "^1.5.1",
         "@elastic/elasticsearch": "^8.9.0",
-        "@elevenlabs/elevenlabs-js": "^2.8.0",
         "@flowiseai/nodevm": "^3.9.25",
         "@getzep/zep-cloud": "~1.0.7",
         "@getzep/zep-js": "^0.9.0",

@@ -42,9 +41,8 @@
         "@google-ai/generativelanguage": "^2.5.0",
         "@google-cloud/storage": "^7.15.2",
         "@google/generative-ai": "^0.24.0",
-        "@grpc/grpc-js": "^1.10.10",
-        "@huggingface/inference": "^4.13.2",
-        "@langchain/anthropic": "0.3.33",
+        "@huggingface/inference": "^2.6.1",
+        "@langchain/anthropic": "0.3.14",
         "@langchain/aws": "^0.1.11",
         "@langchain/baidu-qianfan": "^0.1.0",
         "@langchain/cohere": "^0.0.7",

@@ -74,20 +72,6 @@
         "@modelcontextprotocol/server-slack": "^2025.1.17",
         "@notionhq/client": "^2.2.8",
         "@opensearch-project/opensearch": "^1.2.0",
-        "@opentelemetry/api": "1.9.0",
-        "@opentelemetry/auto-instrumentations-node": "^0.52.0",
-        "@opentelemetry/core": "1.27.0",
-        "@opentelemetry/exporter-metrics-otlp-grpc": "0.54.0",
-        "@opentelemetry/exporter-metrics-otlp-http": "0.54.0",
-        "@opentelemetry/exporter-metrics-otlp-proto": "0.54.0",
-        "@opentelemetry/exporter-trace-otlp-grpc": "0.54.0",
-        "@opentelemetry/exporter-trace-otlp-http": "0.54.0",
-        "@opentelemetry/exporter-trace-otlp-proto": "0.54.0",
-        "@opentelemetry/resources": "1.27.0",
-        "@opentelemetry/sdk-metrics": "1.27.0",
-        "@opentelemetry/sdk-node": "^0.54.0",
-        "@opentelemetry/sdk-trace-base": "1.27.0",
-        "@opentelemetry/semantic-conventions": "1.27.0",
         "@pinecone-database/pinecone": "4.0.0",
         "@qdrant/js-client-rest": "^1.9.0",
         "@stripe/agent-toolkit": "^0.1.20",

@@ -134,7 +134,6 @@ export interface INodeProperties {
     documentation?: string
     color?: string
     hint?: string
-    warning?: string
 }

 export interface INode extends INodeProperties {

@@ -442,9 +441,6 @@ export interface IServerSideEventStreamer {
     streamAbortEvent(chatId: string): void
     streamEndEvent(chatId: string): void
     streamUsageMetadataEvent(chatId: string, data: any): void
-    streamTTSStartEvent(chatId: string, chatMessageId: string, format: string): void
-    streamTTSDataEvent(chatId: string, chatMessageId: string, audioChunk: string): void
-    streamTTSEndEvent(chatId: string, chatMessageId: string): void
 }

 export enum FollowUpPromptProvider {

@@ -1021,7 +1021,7 @@ export class JsonOutputToolsParser extends BaseLLMOutputParser<ParsedToolCall[]>
         const parsedToolCalls = []

         if (!toolCalls) {
-            // @ts-expect-error name and arguments are defined by Object.defineProperty
+            // @ts-expect-error name and arguemnts are defined by Object.defineProperty
             const parsedToolCall: ParsedToolCall = {
                 type: 'undefined',
                 args: {}

@@ -1047,7 +1047,7 @@ export class JsonOutputToolsParser extends BaseLLMOutputParser<ParsedToolCall[]>
         const clonedToolCalls = JSON.parse(JSON.stringify(toolCalls))
         for (const toolCall of clonedToolCalls) {
             if (toolCall.function !== undefined) {
-                // @ts-expect-error name and arguments are defined by Object.defineProperty
+                // @ts-expect-error name and arguemnts are defined by Object.defineProperty
                 const parsedToolCall: ParsedToolCall = {
                     type: toolCall.function.name,
                     args: JSON.parse(toolCall.function.arguments)

@@ -1774,7 +1774,7 @@ export class AnalyticHandler {
         }

         if (Object.prototype.hasOwnProperty.call(this.handlers, 'lunary')) {
-            const toolEventId: string = this.handlers['lunary'].toolEvent[returnIds['lunary'].toolEvent]
+            const toolEventId: string = this.handlers['lunary'].llmEvent[returnIds['lunary'].toolEvent]
             const monitor = this.handlers['lunary'].client

             if (monitor && toolEventId) {

@@ -7,7 +7,6 @@ dotenv.config({ path: envPath, override: true })
 export * from './Interface'
 export * from './utils'
 export * from './speechToText'
-export * from './textToSpeech'
 export * from './storageUtils'
 export * from './handler'
 export * from '../evaluation/EvaluationRunner'

@@ -8,10 +8,6 @@ import { IndexingResult } from './Interface'

 type Metadata = Record<string, unknown>

-export interface ExtendedRecordManagerInterface extends RecordManagerInterface {
-    update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: Record<string, any>): Promise<void>
-}
-
 type StringOrDocFunc = string | ((doc: DocumentInterface) => string)

 export interface HashedDocumentInterface extends DocumentInterface {

@@ -211,7 +207,7 @@ export const _isBaseDocumentLoader = (arg: any): arg is BaseDocumentLoader => {

 interface IndexArgs {
     docsSource: BaseDocumentLoader | DocumentInterface[]
-    recordManager: ExtendedRecordManagerInterface
+    recordManager: RecordManagerInterface
     vectorStore: VectorStore
     options?: IndexOptions
 }

@@ -279,7 +275,7 @@ export async function index(args: IndexArgs): Promise<IndexingResult> {

     const uids: string[] = []
     const docsToIndex: DocumentInterface[] = []
-    const docsToUpdate: Array<{ uid: string; docId: string }> = []
+    const docsToUpdate: string[] = []
     const seenDocs = new Set<string>()
     hashedDocs.forEach((hashedDoc, i) => {
         const docExists = batchExists[i]

@@ -287,7 +283,7 @@ export async function index(args: IndexArgs): Promise<IndexingResult> {
             if (forceUpdate) {
                 seenDocs.add(hashedDoc.uid)
             } else {
-                docsToUpdate.push({ uid: hashedDoc.uid, docId: hashedDoc.metadata.docId as string })
+                docsToUpdate.push(hashedDoc.uid)
                 return
             }
         }

@@ -312,7 +308,7 @@ export async function index(args: IndexArgs): Promise<IndexingResult> {
     }

     await recordManager.update(
-        hashedDocs.map((doc) => ({ uid: doc.uid, docId: doc.metadata.docId as string })),
+        hashedDocs.map((doc) => doc.uid),
         { timeAtLeast: indexStartDt, groupIds: sourceIds }
     )

@@ -16,8 +16,7 @@ export class SecureZodSchemaParser {
         'optional',
         'max',
         'min',
-        'describe',
-        'default'
+        'describe'
     ]

     /**

@@ -138,25 +137,7 @@ export class SecureZodSchemaParser {
     private static parseZodType(typeStr: string): any {
         // Check if this is a nested object (not in an array)
         if (typeStr.startsWith('z.object(') && !typeStr.startsWith('z.array(')) {
-            // Check if there are modifiers after the object
-            const objectWithModifiers = this.extractObjectWithModifiers(typeStr)
-            if (objectWithModifiers.hasModifiers) {
-                const objectMatch = objectWithModifiers.objectPart.match(/z\.object\(\s*\{([\s\S]*)\}\s*\)/)
-                if (!objectMatch) {
-                    throw new Error('Invalid object syntax')
-                }
-
-                const objectContent = objectMatch[1]
-                const objectProperties = this.parseObjectProperties(objectContent)
-
-                return {
-                    isNestedObject: true,
-                    objectSchema: objectProperties,
-                    modifiers: objectWithModifiers.modifiers
-                }
-            }
-
-            // Original code for objects without modifiers
+            // Extract object content
             const objectMatch = typeStr.match(/z\.object\(\s*\{([\s\S]*)\}\s*\)/)
             if (!objectMatch) {
                 throw new Error('Invalid object syntax')

@@ -173,16 +154,6 @@ export class SecureZodSchemaParser {

         // Check if this is any kind of array
         if (typeStr.startsWith('z.array(')) {
-            // Check if there are modifiers after the array
-            const arrayWithModifiers = this.extractArrayWithModifiers(typeStr)
-            if (arrayWithModifiers.hasModifiers) {
-                const arrayResult = this.parseArray(arrayWithModifiers.arrayPart)
-                // Convert array result to have modifiers
-                return {
-                    ...arrayResult,
-                    modifiers: arrayWithModifiers.modifiers
-                }
-            }
             return this.parseArray(typeStr)
         }

@@ -358,191 +329,6 @@ export class SecureZodSchemaParser {
         return items
     }

-    private static extractArrayWithModifiers(typeStr: string): { arrayPart: string; modifiers: any[]; hasModifiers: boolean } {
-        // Find the matching closing parenthesis for z.array(
-        let depth = 0
-        let arrayEndIndex = -1
-        let startIndex = typeStr.indexOf('z.array(') + 7 // Position after "z.array"
-
-        for (let i = startIndex; i < typeStr.length; i++) {
-            if (typeStr[i] === '(') depth++
-            else if (typeStr[i] === ')') {
-                depth--
-                if (depth === 0) {
-                    arrayEndIndex = i + 1
-                    break
-                }
-            }
-        }
-
-        if (arrayEndIndex === -1) {
-            return { arrayPart: typeStr, modifiers: [], hasModifiers: false }
-        }
-
-        const arrayPart = typeStr.substring(0, arrayEndIndex)
-        const remainingPart = typeStr.substring(arrayEndIndex)
-
-        if (!remainingPart.startsWith('.')) {
-            return { arrayPart: typeStr, modifiers: [], hasModifiers: false }
-        }
-
-        // Parse modifiers
-        const modifiers: any[] = []
-        const modifierParts = remainingPart.substring(1).split('.')
-
-        for (const part of modifierParts) {
-            const modMatch = part.match(/^(\w+)(\(.*\))?$/)
-            if (!modMatch) {
-                throw new Error(`Invalid modifier: ${part}`)
-            }
-
-            const modName = modMatch[1]
-            const modArgs = modMatch[2] ? this.parseArguments(modMatch[2]) : []
-
-            if (!this.ALLOWED_TYPES.includes(modName)) {
-                throw new Error(`Unsupported modifier: ${modName}`)
-            }
-
-            modifiers.push({ name: modName, args: modArgs })
-        }
-
-        return { arrayPart, modifiers, hasModifiers: true }
-    }
-
-    private static extractObjectWithModifiers(typeStr: string): { objectPart: string; modifiers: any[]; hasModifiers: boolean } {
-        // Find the matching closing brace and parenthesis for z.object({...})
-        let braceDepth = 0
-        let parenDepth = 0
-        let objectEndIndex = -1
-        let startIndex = typeStr.indexOf('z.object(') + 8 // Position after "z.object"
-        let foundOpenBrace = false
-
-        for (let i = startIndex; i < typeStr.length; i++) {
-            if (typeStr[i] === '{') {
-                braceDepth++
-                foundOpenBrace = true
-            } else if (typeStr[i] === '}') {
-                braceDepth--
-            } else if (typeStr[i] === '(' && foundOpenBrace) {
-                parenDepth++
-            } else if (typeStr[i] === ')' && foundOpenBrace) {
-                if (braceDepth === 0 && parenDepth === 0) {
-                    objectEndIndex = i + 1
-                    break
-                }
-                parenDepth--
-            }
-        }
-
-        if (objectEndIndex === -1) {
-            return { objectPart: typeStr, modifiers: [], hasModifiers: false }
-        }
-
-        const objectPart = typeStr.substring(0, objectEndIndex)
-        const remainingPart = typeStr.substring(objectEndIndex)
-
-        if (!remainingPart.startsWith('.')) {
-            return { objectPart: typeStr, modifiers: [], hasModifiers: false }
-        }
-
-        // Parse modifiers (need special handling for .default() with object argument)
-        const modifiers: any[] = []
-        let i = 1 // Skip the initial dot
-
-        while (i < remainingPart.length) {
-            // Find modifier name
-            const modNameMatch = remainingPart.substring(i).match(/^(\w+)/)
-            if (!modNameMatch) break
-
-            const modName = modNameMatch[1]
-            i += modName.length
-
-            // Check for arguments
-            let modArgs: any[] = []
-            if (i < remainingPart.length && remainingPart[i] === '(') {
-                // Find matching closing paren, handling nested structures
-                let depth = 0
-                let argStart = i
-                for (let j = i; j < remainingPart.length; j++) {
-                    if (remainingPart[j] === '(') depth++
-                    else if (remainingPart[j] === ')') {
-                        depth--
-                        if (depth === 0) {
-                            const argsStr = remainingPart.substring(argStart, j + 1)
-                            modArgs = this.parseComplexArguments(argsStr)
-                            i = j + 1
-                            break
-                        }
-                    }
-                }
-            }
-
-            if (!this.ALLOWED_TYPES.includes(modName)) {
-                throw new Error(`Unsupported modifier: ${modName}`)
-            }
-
-            modifiers.push({ name: modName, args: modArgs })
-
-            // Skip dot if present
-            if (i < remainingPart.length && remainingPart[i] === '.') {
-                i++
-            }
-        }
-
-        return { objectPart, modifiers, hasModifiers: modifiers.length > 0 }
-    }
-
-    private static parseComplexArguments(argsStr: string): any[] {
-        // Remove outer parentheses
-        const inner = argsStr.slice(1, -1).trim()
-        if (!inner) return []
-
-        // Check if it's an object literal
-        if (inner.startsWith('{') && inner.endsWith('}')) {
-            // Parse object literal for .default()
-            return [this.parseObjectLiteral(inner)]
-        }
-
-        // Use existing parseArguments for simple cases
-        return this.parseArguments(argsStr)
-    }
-
-    private static parseObjectLiteral(objStr: string): any {
-        // Simple object literal parser for default values
-        const obj: any = {}
-        const content = objStr.slice(1, -1).trim() // Remove { }
-
-        if (!content) return obj
-
-        // Split by comma at depth 0
-        const props = this.splitProperties(content)
-
-        for (const prop of props) {
-            const colonIndex = prop.indexOf(':')
-            if (colonIndex === -1) continue
-
-            const key = prop.substring(0, colonIndex).trim().replace(/['"]/g, '')
-            const valueStr = prop.substring(colonIndex + 1).trim()
-
-            // Parse the value
-            if (valueStr.startsWith('[') && valueStr.endsWith(']')) {
-                // Array value
-                const arrayContent = valueStr.slice(1, -1)
-                obj[key] = this.parseArrayContent(arrayContent)
-            } else if (valueStr.startsWith('"') && valueStr.endsWith('"')) {
-                // String value
-                obj[key] = valueStr.slice(1, -1)
-            } else if (valueStr.match(/^\d+$/)) {
-                // Number value
-                obj[key] = parseInt(valueStr, 10)
-            } else {
-                obj[key] = valueStr
-            }
-        }
-
-        return obj
-    }
-
     private static buildZodSchema(parsed: Record<string, any>): z.ZodObject<any> {
         const schemaObj: Record<string, z.ZodTypeAny> = {}

@@ -556,40 +342,19 @@
     private static buildZodType(typeInfo: any): z.ZodTypeAny {
         // Special case for nested objects
         if (typeInfo.isNestedObject) {
-            let zodType: z.ZodTypeAny = this.buildZodSchema(typeInfo.objectSchema)
-
-            // Apply modifiers if present
-            if (typeInfo.modifiers) {
-                zodType = this.applyModifiers(zodType, typeInfo.modifiers)
-            }
-
-            return zodType
+            return this.buildZodSchema(typeInfo.objectSchema)
         }

         // Special case for array of objects
         if (typeInfo.isArrayOfObjects) {
             const objectSchema = this.buildZodSchema(typeInfo.objectSchema)
-            let zodType: z.ZodTypeAny = z.array(objectSchema)
-
-            // Apply modifiers if present
-            if (typeInfo.modifiers) {
-                zodType = this.applyModifiers(zodType, typeInfo.modifiers)
-            }
-
-            return zodType
+            return z.array(objectSchema)
         }

         // Special case for simple arrays
         if (typeInfo.isSimpleArray) {
             const innerZodType = this.buildZodType(typeInfo.innerType)
-            let zodType: z.ZodTypeAny = z.array(innerZodType)
-
-            // Apply modifiers if present
-            if (typeInfo.modifiers) {
-                zodType = this.applyModifiers(zodType, typeInfo.modifiers)
-            }
-
-            return zodType
+            return z.array(innerZodType)
         }

         let zodType: z.ZodTypeAny

@@ -621,13 +386,7 @@
         }

         // Apply modifiers
-        zodType = this.applyModifiers(zodType, typeInfo.modifiers || [])
-
-        return zodType
-    }
-
-    private static applyModifiers(zodType: z.ZodTypeAny, modifiers: any[]): z.ZodTypeAny {
-        for (const modifier of modifiers) {
+        for (const modifier of typeInfo.modifiers || []) {
             switch (modifier.name) {
                 case 'int':
                     if (zodType._def?.typeName === 'ZodNumber') {

@ -663,16 +422,12 @@ export class SecureZodSchemaParser {
                         zodType = zodType.describe(modifier.args[0])
                     }
                     break
-                case 'default':
-                    if (modifier.args[0] !== undefined) {
-                        zodType = zodType.default(modifier.args[0])
-                    }
-                    break
                 default:
                     // Ignore unknown modifiers for compatibility
                     break
             }
         }

         return zodType
     }
 }
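For reference, the helper-style side of these hunks reads as a small reducer over parsed modifiers. A self-contained sketch under stated assumptions: the Modifier shape { name, args } is inferred from the switch above, the 'describe' case label is assumed from its body, and only cases visible in this diff are included:

import { z } from 'zod'

type Modifier = { name: string; args: any[] }

const applyParsedModifiers = (schema: z.ZodTypeAny, modifiers: Modifier[]): z.ZodTypeAny => {
    for (const modifier of modifiers) {
        switch (modifier.name) {
            case 'describe':
                schema = schema.describe(modifier.args[0])
                break
            case 'default':
                if (modifier.args[0] !== undefined) {
                    schema = schema.default(modifier.args[0])
                }
                break
            default:
                // Unknown modifiers are ignored for compatibility, as above
                break
        }
    }
    return schema
}

applyParsedModifiers(z.number(), [{ name: 'default', args: [42] }]).parse(undefined) // => 42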
@ -1,240 +0,0 @@
import { ICommonObject } from './Interface'
import { getCredentialData } from './utils'
import OpenAI from 'openai'
import { ElevenLabsClient } from '@elevenlabs/elevenlabs-js'
import { Readable } from 'node:stream'
import type { ReadableStream } from 'node:stream/web'

const TextToSpeechType = {
    OPENAI_TTS: 'openai',
    ELEVEN_LABS_TTS: 'elevenlabs'
}

export const convertTextToSpeechStream = async (
    text: string,
    textToSpeechConfig: ICommonObject,
    options: ICommonObject,
    abortController: AbortController,
    onStart: (format: string) => void,
    onChunk: (chunk: Buffer) => void,
    onEnd: () => void
): Promise<void> => {
    return new Promise<void>((resolve, reject) => {
        let streamDestroyed = false

        // Handle abort signal early
        if (abortController.signal.aborted) {
            reject(new Error('TTS generation aborted'))
            return
        }

        const processStream = async () => {
            try {
                if (textToSpeechConfig) {
                    const credentialId = textToSpeechConfig.credentialId as string
                    const credentialData = await getCredentialData(credentialId ?? '', options)

                    switch (textToSpeechConfig.name) {
                        case TextToSpeechType.OPENAI_TTS: {
                            onStart('mp3')

                            const openai = new OpenAI({
                                apiKey: credentialData.openAIApiKey
                            })

                            const response = await openai.audio.speech.create(
                                {
                                    model: 'gpt-4o-mini-tts',
                                    voice: (textToSpeechConfig.voice || 'alloy') as
                                        | 'alloy'
                                        | 'ash'
                                        | 'ballad'
                                        | 'coral'
                                        | 'echo'
                                        | 'fable'
                                        | 'nova'
                                        | 'onyx'
                                        | 'sage'
                                        | 'shimmer',
                                    input: text,
                                    response_format: 'mp3'
                                },
                                {
                                    signal: abortController.signal
                                }
                            )

                            const stream = response.body as unknown as Readable
                            if (!stream) {
                                throw new Error('Failed to get response stream')
                            }

                            await processStreamWithRateLimit(stream, onChunk, onEnd, resolve, reject, 640, 20, abortController, () => {
                                streamDestroyed = true
                            })
                            break
                        }

                        case TextToSpeechType.ELEVEN_LABS_TTS: {
                            onStart('mp3')

                            const client = new ElevenLabsClient({
                                apiKey: credentialData.elevenLabsApiKey
                            })

                            const response = await client.textToSpeech.stream(
                                textToSpeechConfig.voice || '21m00Tcm4TlvDq8ikWAM',
                                {
                                    text: text,
                                    modelId: 'eleven_multilingual_v2'
                                },
                                { abortSignal: abortController.signal }
                            )

                            const stream = Readable.fromWeb(response as unknown as ReadableStream)
                            if (!stream) {
                                throw new Error('Failed to get response stream')
                            }

                            await processStreamWithRateLimit(stream, onChunk, onEnd, resolve, reject, 640, 40, abortController, () => {
                                streamDestroyed = true
                            })
                            break
                        }
                    }
                } else {
                    reject(new Error('Text to speech is not selected. Please configure TTS in the chatflow.'))
                }
            } catch (error) {
                reject(error)
            }
        }

        // Handle abort signal
        abortController.signal.addEventListener('abort', () => {
            if (!streamDestroyed) {
                reject(new Error('TTS generation aborted'))
            }
        })

        processStream()
    })
}
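For orientation, a minimal caller for the function above, as it might look in a consuming module (the config object and credential id are illustrative, not taken from this diff):

const abort = new AbortController()
const chunks: Buffer[] = []

await convertTextToSpeechStream(
    'Hello world',
    { name: 'openai', voice: 'alloy', credentialId: 'my-credential-id' }, // illustrative config
    {}, // options, forwarded to getCredentialData
    abort,
    (format) => console.log(`stream starting, format=${format}`),
    (chunk) => chunks.push(chunk),
    () => console.log(`done, ${Buffer.concat(chunks).length} bytes received`)
)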
const processStreamWithRateLimit = async (
    stream: Readable,
    onChunk: (chunk: Buffer) => void,
    onEnd: () => void,
    resolve: () => void,
    reject: (error: any) => void,
    targetChunkSize: number = 640,
    rateLimitMs: number = 20,
    abortController: AbortController,
    onStreamDestroy?: () => void
) => {
    const TARGET_CHUNK_SIZE = targetChunkSize
    const RATE_LIMIT_MS = rateLimitMs

    let buffer: Buffer = Buffer.alloc(0)
    let isEnded = false

    const processChunks = async () => {
        while (!isEnded || buffer.length > 0) {
            // Check if aborted
            if (abortController.signal.aborted) {
                if (!stream.destroyed) {
                    stream.destroy()
                }
                onStreamDestroy?.()
                reject(new Error('TTS generation aborted'))
                return
            }

            if (buffer.length >= TARGET_CHUNK_SIZE) {
                const chunk = buffer.subarray(0, TARGET_CHUNK_SIZE)
                buffer = buffer.subarray(TARGET_CHUNK_SIZE)
                onChunk(chunk)
                await sleep(RATE_LIMIT_MS)
            } else if (isEnded && buffer.length > 0) {
                onChunk(buffer)
                buffer = Buffer.alloc(0)
            } else if (!isEnded) {
                await sleep(RATE_LIMIT_MS)
            } else {
                break
            }
        }

        onEnd()
        resolve()
    }

    stream.on('data', (chunk) => {
        if (!abortController.signal.aborted) {
            buffer = Buffer.concat([buffer, Buffer.from(chunk)])
        }
    })

    stream.on('end', () => {
        isEnded = true
    })

    stream.on('error', (error) => {
        reject(error)
    })

    // Handle abort signal
    abortController.signal.addEventListener('abort', () => {
        if (!stream.destroyed) {
            stream.destroy()
        }
        onStreamDestroy?.()
        reject(new Error('TTS generation aborted'))
    })

    processChunks().catch(reject)
}
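The chunk size and delay passed by the two call sites above pin the effective streaming rate. Worked out from the values in this file:

// OpenAI call site: 640 bytes every 20 ms -> 640 / 0.020 = 32,000 B/s ≈ 256 kbit/s
// ElevenLabs call site: 640 bytes every 40 ms -> 640 / 0.040 = 16,000 B/s ≈ 128 kbit/s
const bytesPerSecond = (chunkBytes: number, delayMs: number) => chunkBytes / (delayMs / 1000)
bytesPerSecond(640, 20) // => 32000
bytesPerSecond(640, 40) // => 16000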
const sleep = (ms: number): Promise<void> => {
    return new Promise((resolve) => setTimeout(resolve, ms))
}
export const getVoices = async (provider: string, credentialId: string, options: ICommonObject) => {
    const credentialData = await getCredentialData(credentialId ?? '', options)

    switch (provider) {
        case TextToSpeechType.OPENAI_TTS:
            return [
                { id: 'alloy', name: 'Alloy' },
                { id: 'ash', name: 'Ash' },
                { id: 'ballad', name: 'Ballad' },
                { id: 'coral', name: 'Coral' },
                { id: 'echo', name: 'Echo' },
                { id: 'fable', name: 'Fable' },
                { id: 'nova', name: 'Nova' },
                { id: 'onyx', name: 'Onyx' },
                { id: 'sage', name: 'Sage' },
                { id: 'shimmer', name: 'Shimmer' }
            ]

        case TextToSpeechType.ELEVEN_LABS_TTS: {
            const client = new ElevenLabsClient({
                apiKey: credentialData.elevenLabsApiKey
            })

            const voices = await client.voices.search({
                pageSize: 100,
                voiceType: 'default',
                category: 'premade'
            })

            return voices.voices.map((voice) => ({
                id: voice.voiceId,
                name: voice.name,
                category: voice.category
            }))
        }

        default:
            throw new Error(`Unsupported TTS provider: ${provider}`)
    }
}
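And a hypothetical call to getVoices for completeness (the credential id is illustrative; the ElevenLabs branch returns whatever voices.search yields):

const elevenLabsVoices = await getVoices('elevenlabs', 'my-credential-id', {})
// e.g. [{ id: '21m00Tcm4TlvDq8ikWAM', name: '...', category: 'premade' }, ...]

const openAiVoices = await getVoices('openai', 'my-credential-id', {})
// => the static ten-voice list above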
Some files were not shown because too many files have changed in this diff.