Compare commits: `chore/spli...` → `main`
44 commits

| SHA1 |
| --- |
| 465005a503 |
| e6e0c2d07b |
| 660a8e357a |
| 113180d03b |
| 069ba28bc0 |
| 20db1597a4 |
| 478a294095 |
| 6a59af11e6 |
| 562370b8e2 |
| 4e92db6910 |
| 7cc2c13694 |
| 3ad2b3a559 |
| da32fc7167 |
| 315e3aedc3 |
| 9dbb4bf623 |
| 1f3f7a7194 |
| 4d79653741 |
| 03ef28afbc |
| 0cc7b3036e |
| 097404f24a |
| 2029588d4d |
| c9db81096a |
| b5f7fac015 |
| ca22160361 |
| ffe69936dc |
| b8f7a200fb |
| 2f2b6e1713 |
| 4e1fac501f |
| 888994bc8f |
| 3cab803918 |
| 366d38b861 |
| 2414057c08 |
| 4a642f02d0 |
| ceb0512e2f |
| 94cae3b66f |
| 3fafd15a80 |
| 9ff3d653ae |
| 0dc14b5cd3 |
| b9a020dc70 |
| 761ffe6851 |
| 6d3755d16e |
| faf0a0a315 |
| 4624e15c2e |
| a7b6f9b208 |
**New workflow: Docker Image CI - Docker Hub** (new file, +72 lines)

```yaml
name: Docker Image CI - Docker Hub

on:
    workflow_dispatch:
        inputs:
            node_version:
                description: 'Node.js version to build this image with.'
                type: choice
                required: true
                default: '20'
                options:
                    - '20'
            tag_version:
                description: 'Tag version of the image to be pushed.'
                type: string
                required: true
                default: 'latest'

jobs:
    docker:
        runs-on: ubuntu-latest
        steps:
            - name: Set default values
              id: defaults
              run: |
                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT

            - name: Checkout
              uses: actions/checkout@v4.1.1

            - name: Set up QEMU
              uses: docker/setup-qemu-action@v3.0.0

            - name: Set up Docker Buildx
              uses: docker/setup-buildx-action@v3.0.0

            - name: Login to Docker Hub
              uses: docker/login-action@v3
              with:
                  username: ${{ secrets.DOCKERHUB_USERNAME }}
                  password: ${{ secrets.DOCKERHUB_TOKEN }}

            # -------------------------
            # Build and push main image
            # -------------------------
            - name: Build and push main image
              uses: docker/build-push-action@v5.3.0
              with:
                  context: .
                  file: ./docker/Dockerfile
                  build-args: |
                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
                  platforms: linux/amd64,linux/arm64
                  push: true
                  tags: |
                      flowiseai/flowise:${{ steps.defaults.outputs.tag_version }}

            # -------------------------
            # Build and push worker image
            # -------------------------
            - name: Build and push worker image
              uses: docker/build-push-action@v5.3.0
              with:
                  context: .
                  file: docker/worker/Dockerfile
                  build-args: |
                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
                  platforms: linux/amd64,linux/arm64
                  push: true
                  tags: |
                      flowiseai/flowise-worker:${{ steps.defaults.outputs.tag_version }}
```
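Since this workflow is `workflow_dispatch`-only, it can be triggered from the Actions tab or programmatically. A minimal sketch using the GitHub REST API from Node 18+ (built-in `fetch`); the workflow file name and the `GITHUB_TOKEN` environment variable are assumptions, not taken from this diff:

```typescript
// Trigger the Docker Hub build with explicit inputs.
// Assumes: the workflow file is named docker-image.yml (hypothetical) and
// GITHUB_TOKEN holds a PAT with permission to dispatch workflows.
const owner = 'FlowiseAI'
const repo = 'Flowise'
const workflowFile = 'docker-image.yml' // hypothetical file name

async function dispatchDockerBuild(tagVersion: string, nodeVersion = '20'): Promise<void> {
    const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/actions/workflows/${workflowFile}/dispatches`, {
        method: 'POST',
        headers: {
            Accept: 'application/vnd.github+json',
            Authorization: `Bearer ${process.env.GITHUB_TOKEN}`
        },
        body: JSON.stringify({ ref: 'main', inputs: { node_version: nodeVersion, tag_version: tagVersion } })
    })
    if (!res.ok) throw new Error(`Dispatch failed: ${res.status} ${res.statusText}`)
}

dispatchDockerBuild('latest').catch(console.error)
```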
**New workflow: Docker Image CI - AWS ECR** (new file, +73 lines)

```yaml
name: Docker Image CI - AWS ECR

on:
    workflow_dispatch:
        inputs:
            environment:
                description: 'Environment to push the image to.'
                required: true
                default: 'dev'
                type: choice
                options:
                    - dev
                    - prod
            node_version:
                description: 'Node.js version to build this image with.'
                type: choice
                required: true
                default: '20'
                options:
                    - '20'
            tag_version:
                description: 'Tag version of the image to be pushed.'
                type: string
                required: true
                default: 'latest'

jobs:
    docker:
        runs-on: ubuntu-latest
        environment: ${{ github.event.inputs.environment }}
        steps:
            - name: Set default values
              id: defaults
              run: |
                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT

            - name: Checkout
              uses: actions/checkout@v4.1.1

            - name: Set up QEMU
              uses: docker/setup-qemu-action@v3.0.0

            - name: Set up Docker Buildx
              uses: docker/setup-buildx-action@v3.0.0

            - name: Configure AWS Credentials
              uses: aws-actions/configure-aws-credentials@v3
              with:
                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
                  aws-region: ${{ secrets.AWS_REGION }}

            - name: Login to Amazon ECR
              uses: aws-actions/amazon-ecr-login@v1

            # -------------------------
            # Build and push main image
            # -------------------------
            - name: Build and push main image
              uses: docker/build-push-action@v5.3.0
              with:
                  context: .
                  file: Dockerfile
                  build-args: |
                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
                  platforms: linux/amd64,linux/arm64
                  push: true
                  tags: |
                      ${{ format('{0}.dkr.ecr.{1}.amazonaws.com/flowise:{2}',
                          secrets.AWS_ACCOUNT_ID,
                          secrets.AWS_REGION,
                          steps.defaults.outputs.tag_version) }}
```
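The `format()` expression above assembles the ECR image reference from secrets plus the tag input. The same string construction in plain TypeScript, as a sketch (the account ID and region below are placeholders):

```typescript
// Mirrors the workflow's format() expression for the ECR image tag.
function ecrImageTag(accountId: string, region: string, tagVersion: string): string {
    return `${accountId}.dkr.ecr.${region}.amazonaws.com/flowise:${tagVersion}`
}

// e.g. ecrImageTag('123456789012', 'us-east-1', 'latest')
// -> '123456789012.dkr.ecr.us-east-1.amazonaws.com/flowise:latest'
```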
**Removed workflow: Docker Image CI** (deleted file, -114 lines; replaced by the two dedicated workflows above)

```yaml
name: Docker Image CI

on:
    workflow_dispatch:
        inputs:
            registry:
                description: 'Container Registry to push the image to.'
                type: choice
                required: true
                default: 'aws_ecr'
                options:
                    - 'docker_hub'
                    - 'aws_ecr'
            environment:
                description: 'Environment to push the image to.'
                required: true
                default: 'dev'
                type: choice
                options:
                    - dev
                    - prod
            image_type:
                description: 'Type of image to build and push.'
                type: choice
                required: true
                default: 'main'
                options:
                    - 'main'
                    - 'worker'
            node_version:
                description: 'Node.js version to build this image with.'
                type: choice
                required: true
                default: '20'
                options:
                    - '20'
            tag_version:
                description: 'Tag version of the image to be pushed.'
                type: string
                required: true
                default: 'latest'

jobs:
    docker:
        runs-on: ubuntu-latest
        environment: ${{ github.event.inputs.environment }}
        steps:
            - name: Set default values
              id: defaults
              run: |
                  echo "registry=${{ github.event.inputs.registry || 'aws_ecr' }}" >> $GITHUB_OUTPUT
                  echo "image_type=${{ github.event.inputs.image_type || 'main' }}" >> $GITHUB_OUTPUT
                  echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
                  echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT

            - name: Checkout
              uses: actions/checkout@v4.1.1

            - name: Set up QEMU
              uses: docker/setup-qemu-action@v3.0.0

            - name: Set up Docker Buildx
              uses: docker/setup-buildx-action@v3.0.0

            # ------------------------
            # Login Steps (conditional)
            # ------------------------
            - name: Login to Docker Hub
              if: steps.defaults.outputs.registry == 'docker_hub'
              uses: docker/login-action@v3
              with:
                  username: ${{ secrets.DOCKERHUB_USERNAME }}
                  password: ${{ secrets.DOCKERHUB_TOKEN }}

            - name: Configure AWS Credentials
              if: steps.defaults.outputs.registry == 'aws_ecr'
              uses: aws-actions/configure-aws-credentials@v3
              with:
                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
                  aws-region: ${{ secrets.AWS_REGION }}

            - name: Login to Amazon ECR
              if: steps.defaults.outputs.registry == 'aws_ecr'
              uses: aws-actions/amazon-ecr-login@v1

            # -------------------------
            # Build and push (conditional tags)
            # -------------------------
            - name: Build and push
              uses: docker/build-push-action@v5.3.0
              with:
                  context: .
                  file: |
                      ${{
                          steps.defaults.outputs.image_type == 'worker' && 'docker/worker/Dockerfile' ||
                          (steps.defaults.outputs.registry == 'docker_hub' && './docker/Dockerfile' || 'Dockerfile')
                      }}
                  build-args: |
                      NODE_VERSION=${{ steps.defaults.outputs.node_version }}
                  platforms: linux/amd64,linux/arm64
                  push: true
                  tags: |
                      ${{
                          steps.defaults.outputs.registry == 'docker_hub' &&
                          format('flowiseai/flowise{0}:{1}',
                              steps.defaults.outputs.image_type == 'worker' && '-worker' || '',
                              steps.defaults.outputs.tag_version) ||
                          format('{0}.dkr.ecr.{1}.amazonaws.com/flowise{2}:{3}',
                              secrets.AWS_ACCOUNT_ID,
                              secrets.AWS_REGION,
                              steps.defaults.outputs.image_type == 'worker' && '-worker' || '',
                              steps.defaults.outputs.tag_version)
                      }}
```
**Dockerfile** (37 lines changed)

```diff
@@ -5,33 +5,40 @@
 # docker run -d -p 3000:3000 flowise
 
 FROM node:20-alpine
-RUN apk add --update libc6-compat python3 make g++
-# needed for pdfjs-dist
-RUN apk add --no-cache build-base cairo-dev pango-dev
-
-# Install Chromium
-RUN apk add --no-cache chromium
-
-# Install curl for container-level health checks
-# Fixes: https://github.com/FlowiseAI/Flowise/issues/4126
-RUN apk add --no-cache curl
-
-#install PNPM globaly
-RUN npm install -g pnpm
+# Install system dependencies and build tools
+RUN apk update && \
+    apk add --no-cache \
+    libc6-compat \
+    python3 \
+    make \
+    g++ \
+    build-base \
+    cairo-dev \
+    pango-dev \
+    chromium \
+    curl && \
+    npm install -g pnpm
 
 ENV PUPPETEER_SKIP_DOWNLOAD=true
 ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser
 
 ENV NODE_OPTIONS=--max-old-space-size=8192
 
-WORKDIR /usr/src
+WORKDIR /usr/src/flowise
 
 # Copy app source
 COPY . .
 
-RUN pnpm install
+# Install dependencies and build
+RUN pnpm install && \
+    pnpm build
 
-RUN pnpm build
+# Give the node user ownership of the application files
+RUN chown -R node:node .
+
+# Switch to non-root user (node user already exists in node:20-alpine)
+USER node
 
 EXPOSE 3000
```
**SECURITY.md** (42 lines changed, `@@ -4,35 +4,35 @@ At Flowise, we prioritize security and continuously work to safeguard our system`). The old and new sides of this hunk carry the same wording; the change is formatting-only. The affected text:

### Out of scope vulnerabilities

- Clickjacking on pages without sensitive actions
- CSRF on unauthenticated/logout/login pages
- Attacks requiring MITM (Man-in-the-Middle) or physical device access
- Social engineering attacks
- Activities that cause service disruption (DoS)
- Content spoofing and text injection without a valid attack vector
- Email spoofing
- Absence of DNSSEC, CAA, CSP headers
- Missing Secure or HTTP-only flag on non-sensitive cookies
- Deadlinks
- User enumeration

### Reporting Guidelines

- Submit your findings to https://github.com/FlowiseAI/Flowise/security
- Provide clear details to help us reproduce and fix the issue quickly.

### Disclosure Guidelines

- Do not publicly disclose vulnerabilities until we have assessed, resolved, and notified affected users.
- If you plan to present your research (e.g., at a conference or in a blog), share a draft with us at least **30 days in advance** for review.
- Avoid including:
    - Data from any Flowise customer projects
    - Flowise user/customer information
    - Details about Flowise employees, contractors, or partners

### Response to Reports

- We will acknowledge your report within **5 business days** and provide an estimated resolution timeline.
- Your report will be kept **confidential**, and your details will not be shared without your consent.

We appreciate your efforts in helping us maintain a secure platform and look forward to working together to resolve any issues responsibly.
**Worker Dockerfile** (path `docker/worker/Dockerfile` per the workflow diffs)

```diff
@@ -7,7 +7,7 @@ RUN apk add --no-cache build-base cairo-dev pango-dev
 # Install Chromium and curl for container-level health checks
 RUN apk add --no-cache chromium curl
 
-#install PNPM globaly
+#install PNPM globally
 RUN npm install -g pnpm
 
 ENV PUPPETEER_SKIP_DOWNLOAD=true
```
**package.json**

```diff
@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "3.0.10",
+    "version": "3.0.11",
     "private": true,
     "homepage": "https://flowiseai.com",
     "workspaces": [
```
```diff
@@ -51,7 +51,7 @@
     "eslint-plugin-react-hooks": "^4.6.0",
     "eslint-plugin-unused-imports": "^2.0.0",
     "husky": "^8.0.1",
-    "kill-port": "^2.0.1",
+    "kill-port": "2.0.1",
     "lint-staged": "^13.0.3",
     "prettier": "^2.7.1",
     "pretty-quick": "^3.1.3",
```
**models.json** (model pricing catalog)

```diff
@@ -3,6 +3,13 @@
 {
     "name": "awsChatBedrock",
     "models": [
+        {
+            "label": "anthropic.claude-opus-4-5-20251101-v1:0",
+            "name": "anthropic.claude-opus-4-5-20251101-v1:0",
+            "description": "Claude 4.5 Opus",
+            "input_cost": 0.000005,
+            "output_cost": 0.000025
+        },
         {
             "label": "anthropic.claude-sonnet-4-5-20250929-v1:0",
             "name": "anthropic.claude-sonnet-4-5-20250929-v1:0",
@@ -315,6 +322,12 @@
 {
     "name": "azureChatOpenAI",
     "models": [
+        {
+            "label": "gpt-5.1",
+            "name": "gpt-5.1",
+            "input_cost": 0.00000125,
+            "output_cost": 0.00001
+        },
         {
             "label": "gpt-5",
             "name": "gpt-5",
@@ -499,6 +512,13 @@
 {
     "name": "chatAnthropic",
     "models": [
+        {
+            "label": "claude-opus-4-5",
+            "name": "claude-opus-4-5",
+            "description": "Claude 4.5 Opus",
+            "input_cost": 0.000005,
+            "output_cost": 0.000025
+        },
         {
             "label": "claude-sonnet-4-5",
             "name": "claude-sonnet-4-5",
@@ -621,6 +641,18 @@
 {
     "name": "chatGoogleGenerativeAI",
     "models": [
+        {
+            "label": "gemini-3-pro-preview",
+            "name": "gemini-3-pro-preview",
+            "input_cost": 0.00002,
+            "output_cost": 0.00012
+        },
+        {
+            "label": "gemini-3-pro-image-preview",
+            "name": "gemini-3-pro-image-preview",
+            "input_cost": 0.00002,
+            "output_cost": 0.00012
+        },
         {
             "label": "gemini-2.5-pro",
             "name": "gemini-2.5-pro",
@@ -633,6 +665,12 @@
             "input_cost": 1.25e-6,
             "output_cost": 0.00001
         },
+        {
+            "label": "gemini-2.5-flash-image",
+            "name": "gemini-2.5-flash-image",
+            "input_cost": 1.25e-6,
+            "output_cost": 0.00001
+        },
         {
             "label": "gemini-2.5-flash-lite",
             "name": "gemini-2.5-flash-lite",
@@ -685,6 +723,12 @@
 {
     "name": "chatGoogleVertexAI",
     "models": [
+        {
+            "label": "gemini-3-pro-preview",
+            "name": "gemini-3-pro-preview",
+            "input_cost": 0.00002,
+            "output_cost": 0.00012
+        },
         {
             "label": "gemini-2.5-pro",
             "name": "gemini-2.5-pro",
@@ -751,6 +795,13 @@
             "input_cost": 1.25e-7,
             "output_cost": 3.75e-7
         },
+        {
+            "label": "claude-opus-4-5@20251101",
+            "name": "claude-opus-4-5@20251101",
+            "description": "Claude 4.5 Opus",
+            "input_cost": 0.000005,
+            "output_cost": 0.000025
+        },
         {
             "label": "claude-sonnet-4-5@20250929",
             "name": "claude-sonnet-4-5@20250929",
@@ -996,6 +1047,12 @@
 {
     "name": "chatOpenAI",
     "models": [
+        {
+            "label": "gpt-5.1",
+            "name": "gpt-5.1",
+            "input_cost": 0.00000125,
+            "output_cost": 0.00001
+        },
         {
             "label": "gpt-5",
             "name": "gpt-5",
```
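Each catalog entry's `input_cost` and `output_cost` appear to be per-token USD rates. A sketch of how a caller might price a completion from this catalog (the lookup shape is an assumption, not code from this PR):

```typescript
interface ModelPrice {
    label: string
    name: string
    description?: string
    input_cost: number // USD per input token (assumed)
    output_cost: number // USD per output token (assumed)
}

// e.g. priceCompletion(catalog, 'gpt-5.1', 1200, 350)
function priceCompletion(models: ModelPrice[], name: string, inputTokens: number, outputTokens: number): number {
    const model = models.find((m) => m.name === name)
    if (!model) throw new Error(`Unknown model: ${name}`)
    return inputTokens * model.input_cost + outputTokens * model.output_cost
}
```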
**Agent node (Agent_Agentflow)**

```diff
@@ -22,15 +22,16 @@ import zodToJsonSchema from 'zod-to-json-schema'
 import { getErrorMessage } from '../../../src/error'
 import { DataSource } from 'typeorm'
 import {
+    addImageArtifactsToMessages,
+    extractArtifactsFromResponse,
     getPastChatHistoryImageMessages,
     getUniqueImageMessages,
     processMessagesWithImages,
     replaceBase64ImagesWithFileReferences,
+    replaceInlineDataWithFileReferences,
     updateFlowState
 } from '../utils'
-import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, processTemplateVariables } from '../../../src/utils'
-import { addSingleFileToStorage } from '../../../src/storageUtils'
-import fetch from 'node-fetch'
+import { convertMultiOptionsToStringArray, processTemplateVariables, configureStructuredOutput } from '../../../src/utils'
 
 interface ITool {
     agentSelectedTool: string
@@ -81,7 +82,7 @@ class Agent_Agentflow implements INode {
     constructor() {
         this.label = 'Agent'
         this.name = 'agentAgentflow'
-        this.version = 2.2
+        this.version = 3.2
         this.type = 'Agent'
         this.category = 'Agent Flows'
         this.description = 'Dynamically choose and utilize tools during runtime, enabling multi-step reasoning'
@@ -176,6 +177,11 @@ class Agent_Agentflow implements INode {
                     label: 'Google Search',
                     name: 'googleSearch',
                     description: 'Search real-time web content'
+                },
+                {
+                    label: 'Code Execution',
+                    name: 'codeExecution',
+                    description: 'Write and run Python code in a sandboxed environment'
                 }
             ],
             show: {
```
```diff
@@ -394,6 +400,108 @@ class Agent_Agentflow implements INode {
                 ],
                 default: 'userMessage'
             },
+            {
+                label: 'JSON Structured Output',
+                name: 'agentStructuredOutput',
+                description: 'Instruct the Agent to give output in a JSON structured schema',
+                type: 'array',
+                optional: true,
+                acceptVariable: true,
+                array: [
+                    {
+                        label: 'Key',
+                        name: 'key',
+                        type: 'string'
+                    },
+                    {
+                        label: 'Type',
+                        name: 'type',
+                        type: 'options',
+                        options: [
+                            {
+                                label: 'String',
+                                name: 'string'
+                            },
+                            {
+                                label: 'String Array',
+                                name: 'stringArray'
+                            },
+                            {
+                                label: 'Number',
+                                name: 'number'
+                            },
+                            {
+                                label: 'Boolean',
+                                name: 'boolean'
+                            },
+                            {
+                                label: 'Enum',
+                                name: 'enum'
+                            },
+                            {
+                                label: 'JSON Array',
+                                name: 'jsonArray'
+                            }
+                        ]
+                    },
+                    {
+                        label: 'Enum Values',
+                        name: 'enumValues',
+                        type: 'string',
+                        placeholder: 'value1, value2, value3',
+                        description: 'Enum values. Separated by comma',
+                        optional: true,
+                        show: {
+                            'agentStructuredOutput[$index].type': 'enum'
+                        }
+                    },
+                    {
+                        label: 'JSON Schema',
+                        name: 'jsonSchema',
+                        type: 'code',
+                        placeholder: `{
+    "answer": {
+        "type": "string",
+        "description": "Value of the answer"
+    },
+    "reason": {
+        "type": "string",
+        "description": "Reason for the answer"
+    },
+    "optional": {
+        "type": "boolean"
+    },
+    "count": {
+        "type": "number"
+    },
+    "children": {
+        "type": "array",
+        "items": {
+            "type": "object",
+            "properties": {
+                "value": {
+                    "type": "string",
+                    "description": "Value of the children's answer"
+                }
+            }
+        }
+    }
+}`,
+                        description: 'JSON schema for the structured output',
+                        optional: true,
+                        hideCodeExecute: true,
+                        show: {
+                            'agentStructuredOutput[$index].type': 'jsonArray'
+                        }
+                    },
+                    {
+                        label: 'Description',
+                        name: 'description',
+                        type: 'string',
+                        placeholder: 'Description of the key'
+                    }
+                ]
+            },
             {
                 label: 'Update Flow State',
                 name: 'agentUpdateState',
```
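These `agentStructuredOutput` rows (key, type, enum values, description) have to become a schema the model can be bound to. The real helper is `configureStructuredOutput` in `src/utils`, which this diff imports but does not show; a plausible sketch of its shape, using `zod` and LangChain's `withStructuredOutput`, both already dependencies of this file:

```typescript
import { z, ZodTypeAny } from 'zod'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'

interface IStructuredOutput {
    key: string
    type: 'string' | 'stringArray' | 'number' | 'boolean' | 'enum' | 'jsonArray'
    enumValues?: string
    jsonSchema?: string
    description?: string
}

// Sketch only: the actual configureStructuredOutput lives in src/utils.
function configureStructuredOutputSketch(llm: BaseChatModel, outputs: IStructuredOutput[]) {
    const shape: Record<string, ZodTypeAny> = {}
    for (const item of outputs) {
        let field: ZodTypeAny
        switch (item.type) {
            case 'string':
                field = z.string()
                break
            case 'stringArray':
                field = z.array(z.string())
                break
            case 'number':
                field = z.number()
                break
            case 'boolean':
                field = z.boolean()
                break
            case 'enum': {
                const values = (item.enumValues ?? '').split(',').map((v) => v.trim())
                field = z.enum(values as [string, ...string[]])
                break
            }
            case 'jsonArray':
                // The UI collects a raw JSON schema string for array items; a full
                // implementation would convert it, this sketch accepts any objects.
                field = z.array(z.record(z.any()))
                break
        }
        shape[item.key] = item.description ? field.describe(item.description) : field
    }
    return llm.withStructuredOutput(z.object(shape))
}
```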
```diff
@@ -406,8 +514,7 @@ class Agent_Agentflow implements INode {
                     label: 'Key',
                     name: 'key',
                     type: 'asyncOptions',
-                    loadMethod: 'listRuntimeStateKeys',
-                    freeSolo: true
+                    loadMethod: 'listRuntimeStateKeys'
                 },
                 {
                     label: 'Value',
@@ -770,6 +877,7 @@ class Agent_Agentflow implements INode {
         const memoryType = nodeData.inputs?.agentMemoryType as string
         const userMessage = nodeData.inputs?.agentUserMessage as string
         const _agentUpdateState = nodeData.inputs?.agentUpdateState
+        const _agentStructuredOutput = nodeData.inputs?.agentStructuredOutput
         const agentMessages = (nodeData.inputs?.agentMessages as unknown as ILLMMessage[]) ?? []
 
         // Extract runtime state and history
@@ -795,6 +903,8 @@ class Agent_Agentflow implements INode {
         const llmWithoutToolsBind = (await newLLMNodeInstance.init(newNodeData, '', options)) as BaseChatModel
         let llmNodeInstance = llmWithoutToolsBind
 
+        const isStructuredOutput = _agentStructuredOutput && Array.isArray(_agentStructuredOutput) && _agentStructuredOutput.length > 0
+
         const agentToolsBuiltInOpenAI = convertMultiOptionsToStringArray(nodeData.inputs?.agentToolsBuiltInOpenAI)
         if (agentToolsBuiltInOpenAI && agentToolsBuiltInOpenAI.length > 0) {
             for (const tool of agentToolsBuiltInOpenAI) {
@@ -953,7 +1063,7 @@ class Agent_Agentflow implements INode {
         // Initialize response and determine if streaming is possible
         let response: AIMessageChunk = new AIMessageChunk('')
         const isLastNode = options.isLastNode as boolean
-        const isStreamable = isLastNode && options.sseStreamer !== undefined && modelConfig?.streaming !== false
+        const isStreamable = isLastNode && options.sseStreamer !== undefined && modelConfig?.streaming !== false && !isStructuredOutput
 
         // Start analytics
         if (analyticHandlers && options.parentTraceIds) {
@@ -961,12 +1071,6 @@ class Agent_Agentflow implements INode {
             llmIds = await analyticHandlers.onLLMStart(llmLabel, messages, options.parentTraceIds)
         }
 
-        // Track execution time
-        const startTime = Date.now()
-
-        // Get initial response from LLM
-        const sseStreamer: IServerSideEventStreamer | undefined = options.sseStreamer
-
         // Handle tool calls with support for recursion
         let usedTools: IUsedTool[] = []
         let sourceDocuments: Array<any> = []
```
```diff
@@ -979,12 +1083,24 @@ class Agent_Agentflow implements INode {
         const messagesBeforeToolCalls = [...messages]
         let _toolCallMessages: BaseMessageLike[] = []
 
+        /**
+         * Add image artifacts from previous assistant responses as user messages
+         * Images are converted from FILE-STORAGE::<image_path> to base 64 image_url format
+         */
+        await addImageArtifactsToMessages(messages, options)
+
         // Check if this is hummanInput for tool calls
         const _humanInput = nodeData.inputs?.humanInput
         const humanInput: IHumanInput = typeof _humanInput === 'string' ? JSON.parse(_humanInput) : _humanInput
         const humanInputAction = options.humanInputAction
         const iterationContext = options.iterationContext
 
+        // Track execution time
+        const startTime = Date.now()
+
+        // Get initial response from LLM
+        const sseStreamer: IServerSideEventStreamer | undefined = options.sseStreamer
+
         if (humanInput) {
             if (humanInput.type !== 'proceed' && humanInput.type !== 'reject') {
                 throw new Error(`Invalid human input type. Expected 'proceed' or 'reject', but got '${humanInput.type}'`)
@@ -1002,7 +1118,8 @@ class Agent_Agentflow implements INode {
                 llmWithoutToolsBind,
                 isStreamable,
                 isLastNode,
-                iterationContext
+                iterationContext,
+                isStructuredOutput
             })
 
             response = result.response
@@ -1031,7 +1148,14 @@ class Agent_Agentflow implements INode {
             }
         } else {
             if (isStreamable) {
-                response = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController)
+                response = await this.handleStreamingResponse(
+                    sseStreamer,
+                    llmNodeInstance,
+                    messages,
+                    chatId,
+                    abortController,
+                    isStructuredOutput
+                )
             } else {
                 response = await llmNodeInstance.invoke(messages, { signal: abortController?.signal })
             }
@@ -1053,7 +1177,8 @@ class Agent_Agentflow implements INode {
                 llmNodeInstance,
                 isStreamable,
                 isLastNode,
-                iterationContext
+                iterationContext,
+                isStructuredOutput
             })
 
             response = result.response
@@ -1080,11 +1205,20 @@ class Agent_Agentflow implements INode {
                     sseStreamer.streamArtifactsEvent(chatId, flatten(artifacts))
                 }
             }
-        } else if (!humanInput && !isStreamable && isLastNode && sseStreamer) {
+        } else if (!humanInput && !isStreamable && isLastNode && sseStreamer && !isStructuredOutput) {
             // Stream whole response back to UI if not streaming and no tool calls
+            // Skip this if structured output is enabled - it will be streamed after conversion
             let finalResponse = ''
             if (response.content && Array.isArray(response.content)) {
-                finalResponse = response.content.map((item: any) => item.text).join('\n')
+                finalResponse = response.content
+                    .map((item: any) => {
+                        if ((item.text && !item.type) || (item.type === 'text' && item.text)) {
+                            return item.text
+                        }
+                        return ''
+                    })
+                    .filter((text: string) => text)
+                    .join('\n')
             } else if (response.content && typeof response.content === 'string') {
                 finalResponse = response.content
             } else {
```
```diff
@@ -1113,9 +1247,53 @@ class Agent_Agentflow implements INode {
         // Prepare final response and output object
         let finalResponse = ''
         if (response.content && Array.isArray(response.content)) {
-            finalResponse = response.content.map((item: any) => item.text).join('\n')
+            // Process items and concatenate consecutive text items
+            const processedParts: string[] = []
+            let currentTextBuffer = ''
+
+            for (const item of response.content) {
+                const itemAny = item as any
+                const isTextItem = (itemAny.text && !itemAny.type) || (itemAny.type === 'text' && itemAny.text)
+
+                if (isTextItem) {
+                    // Accumulate consecutive text items
+                    currentTextBuffer += itemAny.text
+                } else {
+                    // Flush accumulated text before processing other types
+                    if (currentTextBuffer) {
+                        processedParts.push(currentTextBuffer)
+                        currentTextBuffer = ''
+                    }
+
+                    // Process non-text items
+                    if (itemAny.type === 'executableCode' && itemAny.executableCode) {
+                        // Format executable code as a code block
+                        const language = itemAny.executableCode.language?.toLowerCase() || 'python'
+                        processedParts.push(`\n\`\`\`${language}\n${itemAny.executableCode.code}\n\`\`\`\n`)
+                    } else if (itemAny.type === 'codeExecutionResult' && itemAny.codeExecutionResult) {
+                        // Format code execution result
+                        const outcome = itemAny.codeExecutionResult.outcome || 'OUTCOME_OK'
+                        const output = itemAny.codeExecutionResult.output || ''
+                        if (outcome === 'OUTCOME_OK' && output) {
+                            processedParts.push(`**Code Output:**\n\`\`\`\n${output}\n\`\`\`\n`)
+                        } else if (outcome !== 'OUTCOME_OK') {
+                            processedParts.push(`**Code Execution Error:**\n\`\`\`\n${output}\n\`\`\`\n`)
+                        }
+                    }
+                }
+            }
+
+            // Flush any remaining text
+            if (currentTextBuffer) {
+                processedParts.push(currentTextBuffer)
+            }
+
+            finalResponse = processedParts.filter((text) => text).join('\n')
         } else if (response.content && typeof response.content === 'string') {
             finalResponse = response.content
+        } else if (response.content === '') {
+            // Empty response content, this could happen when there is only image data
+            finalResponse = ''
         } else {
             finalResponse = JSON.stringify(response, null, 2)
         }
@@ -1131,10 +1309,13 @@ class Agent_Agentflow implements INode {
             }
         }
 
-        // Extract artifacts from annotations in response metadata
+        // Extract artifacts from annotations in response metadata and replace inline data
         if (response.response_metadata) {
-            const { artifacts: extractedArtifacts, fileAnnotations: extractedFileAnnotations } =
-                await this.extractArtifactsFromResponse(response.response_metadata, newNodeData, options)
+            const {
+                artifacts: extractedArtifacts,
+                fileAnnotations: extractedFileAnnotations,
+                savedInlineImages
+            } = await extractArtifactsFromResponse(response.response_metadata, newNodeData, options)
             if (extractedArtifacts.length > 0) {
                 artifacts = [...artifacts, ...extractedArtifacts]
 
@@ -1152,6 +1333,11 @@ class Agent_Agentflow implements INode {
                     sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
                 }
             }
+
+            // Replace inlineData base64 with file references in the response
+            if (savedInlineImages && savedInlineImages.length > 0) {
+                replaceInlineDataWithFileReferences(response, savedInlineImages)
+            }
         }
 
         // Replace sandbox links with proper download URLs. Example: [Download the script](sandbox:/mnt/data/dummy_bar_graph.py)
```
```diff
@@ -1159,6 +1345,23 @@ class Agent_Agentflow implements INode {
             finalResponse = await this.processSandboxLinks(finalResponse, options.baseURL, options.chatflowid, chatId)
         }
 
+        // If is structured output, then invoke LLM again with structured output at the very end after all tool calls
+        if (isStructuredOutput) {
+            llmNodeInstance = configureStructuredOutput(llmNodeInstance, _agentStructuredOutput)
+            const prompt = 'Convert the following response to the structured output format: ' + finalResponse
+            response = await llmNodeInstance.invoke(prompt, { signal: abortController?.signal })
+
+            if (typeof response === 'object') {
+                finalResponse = '```json\n' + JSON.stringify(response, null, 2) + '\n```'
+            } else {
+                finalResponse = response
+            }
+
+            if (isLastNode && sseStreamer) {
+                sseStreamer.streamTokenEvent(chatId, finalResponse)
+            }
+        }
+
         const output = this.prepareOutputObject(
             response,
             availableTools,
```
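Worth noting: structured output here is a two-pass design. The agent first runs its tool-calling loop with the plain model, and only the final text is re-sent to a schema-bound model for conversion, which is also why streaming is suppressed whenever `isStructuredOutput` is set. A condensed sketch of the pattern, with placeholder names:

```typescript
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { BaseMessageLike } from '@langchain/core/messages'

// Placeholders for this sketch; the real equivalents live in the Agent node and src/utils.
declare function runToolLoop(llm: BaseChatModel, messages: BaseMessageLike[]): Promise<string>
declare function configureStructuredOutput(llm: BaseChatModel, schema: unknown[]): BaseChatModel

async function agentWithStructuredOutput(
    llm: BaseChatModel,
    messages: BaseMessageLike[],
    schema: unknown[]
): Promise<unknown> {
    // Pass 1: ordinary agent loop with tools; streaming stays off when a schema is set
    const finalText = await runToolLoop(llm, messages)

    // Pass 2: bind the schema and have the model reshape its own final answer
    const structuredLlm = configureStructuredOutput(llm, schema)
    return structuredLlm.invoke('Convert the following response to the structured output format: ' + finalText)
}
```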
```diff
@@ -1171,7 +1374,8 @@ class Agent_Agentflow implements INode {
             artifacts,
             additionalTokens,
             isWaitingForHumanInput,
-            fileAnnotations
+            fileAnnotations,
+            isStructuredOutput
         )
 
         // End analytics tracking
@@ -1192,9 +1396,15 @@ class Agent_Agentflow implements INode {
             // Process template variables in state
             newState = processTemplateVariables(newState, finalResponse)
 
+            /**
+             * Remove the temporarily added image artifact messages before storing
+             * This is to avoid storing the actual base64 data into database
+             */
+            const messagesToStore = messages.filter((msg: any) => !msg._isTemporaryImageMessage)
+
             // Replace the actual messages array with one that includes the file references for images instead of base64 data
             const messagesWithFileReferences = replaceBase64ImagesWithFileReferences(
-                messages,
+                messagesToStore,
                 runtimeImageMessagesWithFileRef,
                 pastImageMessagesWithFileRef
             )
```
```diff
@@ -1333,7 +1543,12 @@ class Agent_Agentflow implements INode {
         // Handle Gemini googleSearch tool
         if (groundingMetadata && groundingMetadata.webSearchQueries && Array.isArray(groundingMetadata.webSearchQueries)) {
             // Check for duplicates
-            if (!builtInUsedTools.find((tool) => tool.tool === 'googleSearch')) {
+            const isDuplicate = builtInUsedTools.find(
+                (tool) =>
+                    tool.tool === 'googleSearch' &&
+                    JSON.stringify((tool.toolInput as any)?.queries) === JSON.stringify(groundingMetadata.webSearchQueries)
+            )
+            if (!isDuplicate) {
                 builtInUsedTools.push({
                     tool: 'googleSearch',
                     toolInput: {
@@ -1347,7 +1562,12 @@ class Agent_Agentflow implements INode {
         // Handle Gemini urlContext tool
         if (urlContextMetadata && urlContextMetadata.urlMetadata && Array.isArray(urlContextMetadata.urlMetadata)) {
             // Check for duplicates
-            if (!builtInUsedTools.find((tool) => tool.tool === 'urlContext')) {
+            const isDuplicate = builtInUsedTools.find(
+                (tool) =>
+                    tool.tool === 'urlContext' &&
+                    JSON.stringify((tool.toolInput as any)?.urlMetadata) === JSON.stringify(urlContextMetadata.urlMetadata)
+            )
+            if (!isDuplicate) {
                 builtInUsedTools.push({
                     tool: 'urlContext',
                     toolInput: {
```
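The duplicate check now compares tool inputs rather than just tool names, so two distinct `googleSearch` calls in one turn are both recorded. `JSON.stringify` equality is order-sensitive but cheap; a generic version of the same check, as a sketch:

```typescript
interface UsedTool {
    tool: string
    toolInput: Record<string, unknown>
}

// Order-sensitive structural equality via JSON serialization,
// matching the comparison style used in the diff above.
function isDuplicateTool(used: UsedTool[], candidate: UsedTool): boolean {
    return used.some(
        (t) => t.tool === candidate.tool && JSON.stringify(t.toolInput) === JSON.stringify(candidate.toolInput)
    )
}
```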
```diff
@@ -1358,45 +1578,53 @@ class Agent_Agentflow implements INode {
             }
         }
 
-        return builtInUsedTools
-    }
-
-    /**
-     * Saves base64 image data to storage and returns file information
-     */
-    private async saveBase64Image(
-        outputItem: any,
-        options: ICommonObject
-    ): Promise<{ filePath: string; fileName: string; totalSize: number } | null> {
-        try {
-            if (!outputItem.result) {
-                return null
-            }
-
-            // Extract base64 data and create buffer
-            const base64Data = outputItem.result
-            const imageBuffer = Buffer.from(base64Data, 'base64')
-
-            // Determine file extension and MIME type
-            const outputFormat = outputItem.output_format || 'png'
-            const fileName = `generated_image_${outputItem.id || Date.now()}.${outputFormat}`
-            const mimeType = outputFormat === 'png' ? 'image/png' : 'image/jpeg'
-
-            // Save the image using the existing storage utility
-            const { path, totalSize } = await addSingleFileToStorage(
-                mimeType,
-                imageBuffer,
-                fileName,
-                options.orgId,
-                options.chatflowid,
-                options.chatId
-            )
-
-            return { filePath: path, fileName, totalSize }
-        } catch (error) {
-            console.error('Error saving base64 image:', error)
-            return null
-        }
+        // Handle Gemini codeExecution tool
+        if (response.content && Array.isArray(response.content)) {
+            for (let i = 0; i < response.content.length; i++) {
+                const item = response.content[i]
+
+                if (item.type === 'executableCode' && item.executableCode) {
+                    const language = item.executableCode.language || 'PYTHON'
+                    const code = item.executableCode.code || ''
+                    let toolOutput = ''
+
+                    // Check for duplicates
+                    const isDuplicate = builtInUsedTools.find(
+                        (tool) =>
+                            tool.tool === 'codeExecution' &&
+                            (tool.toolInput as any)?.language === language &&
+                            (tool.toolInput as any)?.code === code
+                    )
+                    if (isDuplicate) {
+                        continue
+                    }
+
+                    // Check the next item for the output
+                    const nextItem = i + 1 < response.content.length ? response.content[i + 1] : null
+
+                    if (nextItem) {
+                        if (nextItem.type === 'codeExecutionResult' && nextItem.codeExecutionResult) {
+                            const outcome = nextItem.codeExecutionResult.outcome
+                            const output = nextItem.codeExecutionResult.output || ''
+                            toolOutput = outcome === 'OUTCOME_OK' ? output : `Error: ${output}`
+                        } else if (nextItem.type === 'inlineData') {
+                            toolOutput = 'Generated image data'
+                        }
+                    }
+
+                    builtInUsedTools.push({
+                        tool: 'codeExecution',
+                        toolInput: {
+                            language,
+                            code
+                        },
+                        toolOutput
+                    })
+                }
+            }
+        }
+
+        return builtInUsedTools
     }
 
     /**
```
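The `executableCode`, `codeExecutionResult`, and `inlineData` parts handled here are Gemini-style response content items. A rough shape of what the loop above expects, with field names taken from the diff but the exact typing treated as an assumption:

```typescript
// Rough typing of the Gemini content parts consumed above (assumption, not a published type).
type GeminiContentPart =
    | { type: 'text'; text: string }
    | { type: 'executableCode'; executableCode: { language?: string; code: string } }
    | { type: 'codeExecutionResult'; codeExecutionResult: { outcome?: 'OUTCOME_OK' | string; output?: string } }
    | { type: 'inlineData'; inlineData: { mimeType: string; data: string } } // base64 payload
```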
```diff
@@ -1561,32 +1789,62 @@ class Agent_Agentflow implements INode {
         llmNodeInstance: BaseChatModel,
         messages: BaseMessageLike[],
         chatId: string,
-        abortController: AbortController
+        abortController: AbortController,
+        isStructuredOutput: boolean = false
     ): Promise<AIMessageChunk> {
         let response = new AIMessageChunk('')
 
         try {
             for await (const chunk of await llmNodeInstance.stream(messages, { signal: abortController?.signal })) {
-                if (sseStreamer) {
+                if (sseStreamer && !isStructuredOutput) {
                     let content = ''
-                    if (Array.isArray(chunk.content) && chunk.content.length > 0) {
-                        const contents = chunk.content as MessageContentText[]
-                        content = contents.map((item) => item.text).join('')
-                    } else {
+
+                    if (typeof chunk === 'string') {
+                        content = chunk
+                    } else if (Array.isArray(chunk.content) && chunk.content.length > 0) {
+                        content = chunk.content
+                            .map((item: any) => {
+                                if ((item.text && !item.type) || (item.type === 'text' && item.text)) {
+                                    return item.text
+                                } else if (item.type === 'executableCode' && item.executableCode) {
+                                    const language = item.executableCode.language?.toLowerCase() || 'python'
+                                    return `\n\`\`\`${language}\n${item.executableCode.code}\n\`\`\`\n`
+                                } else if (item.type === 'codeExecutionResult' && item.codeExecutionResult) {
+                                    const outcome = item.codeExecutionResult.outcome || 'OUTCOME_OK'
+                                    const output = item.codeExecutionResult.output || ''
+                                    if (outcome === 'OUTCOME_OK' && output) {
+                                        return `**Code Output:**\n\`\`\`\n${output}\n\`\`\`\n`
+                                    } else if (outcome !== 'OUTCOME_OK') {
+                                        return `**Code Execution Error:**\n\`\`\`\n${output}\n\`\`\`\n`
+                                    }
+                                }
+                                return ''
+                            })
+                            .filter((text: string) => text)
+                            .join('')
+                    } else if (chunk.content) {
                         content = chunk.content.toString()
                     }
                     sseStreamer.streamTokenEvent(chatId, content)
                 }
 
-                response = response.concat(chunk)
+                const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
+                response = response.concat(messageChunk)
             }
         } catch (error) {
             console.error('Error during streaming:', error)
             throw error
         }
 
+        // Only convert to string if all content items are text (no inlineData or other special types)
         if (Array.isArray(response.content) && response.content.length > 0) {
-            const responseContents = response.content as MessageContentText[]
-            response.content = responseContents.map((item) => item.text).join('')
+            const hasNonTextContent = response.content.some(
+                (item: any) => item.type === 'inlineData' || item.type === 'executableCode' || item.type === 'codeExecutionResult'
+            )
+            if (!hasNonTextContent) {
+                const responseContents = response.content as MessageContentText[]
+                response.content = responseContents.map((item) => item.text).join('')
+            }
         }
         return response
     }
```
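One subtle fix in this hunk: some providers can yield plain string tokens from `.stream()`, so every chunk is normalized before `concat`. The pattern in isolation (a sketch; `AIMessageChunk` and its `concat` are the real LangChain API):

```typescript
import { AIMessageChunk } from '@langchain/core/messages'

// Accepts either a raw string token or a full message chunk and always
// returns something safe to concat onto the accumulated response.
function toMessageChunk(chunk: string | AIMessageChunk): AIMessageChunk {
    return typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
}

// usage inside the stream loop: response = response.concat(toMessageChunk(chunk))
```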
```diff
@@ -1606,7 +1864,8 @@ class Agent_Agentflow implements INode {
         artifacts: any[],
         additionalTokens: number = 0,
         isWaitingForHumanInput: boolean = false,
-        fileAnnotations: any[] = []
+        fileAnnotations: any[] = [],
+        isStructuredOutput: boolean = false
     ): any {
         const output: any = {
             content: finalResponse,
@@ -1641,6 +1900,15 @@ class Agent_Agentflow implements INode {
             output.responseMetadata = response.response_metadata
         }
 
+        if (isStructuredOutput && typeof response === 'object') {
+            const structuredOutput = response as Record<string, any>
+            for (const key in structuredOutput) {
+                if (structuredOutput[key] !== undefined && structuredOutput[key] !== null) {
+                    output[key] = structuredOutput[key]
+                }
+            }
+        }
+
         // Add used tools, source documents and artifacts to output
         if (usedTools && usedTools.length > 0) {
             output.usedTools = flatten(usedTools)
@@ -1706,7 +1974,8 @@ class Agent_Agentflow implements INode {
             llmNodeInstance,
             isStreamable,
             isLastNode,
-            iterationContext
+            iterationContext,
+            isStructuredOutput = false
         }: {
             response: AIMessageChunk
             messages: BaseMessageLike[]
@@ -1720,6 +1989,7 @@ class Agent_Agentflow implements INode {
             isStreamable: boolean
             isLastNode: boolean
             iterationContext: ICommonObject
+            isStructuredOutput?: boolean
         }): Promise<{
            response: AIMessageChunk
            usedTools: IUsedTool[]
@@ -1799,7 +2069,9 @@ class Agent_Agentflow implements INode {
             const toolCallDetails = '```json\n' + JSON.stringify(toolCall, null, 2) + '\n```'
             const responseContent = response.content + `\nAttempting to use tool:\n${toolCallDetails}`
             response.content = responseContent
-            sseStreamer?.streamTokenEvent(chatId, responseContent)
+            if (!isStructuredOutput) {
+                sseStreamer?.streamTokenEvent(chatId, responseContent)
+            }
             return { response, usedTools, sourceDocuments, artifacts, totalTokens, isWaitingForHumanInput: true }
         }
 
@@ -1905,7 +2177,7 @@ class Agent_Agentflow implements INode {
             const lastToolOutput = usedTools[0]?.toolOutput || ''
             const lastToolOutputString = typeof lastToolOutput === 'string' ? lastToolOutput : JSON.stringify(lastToolOutput, null, 2)
 
-            if (sseStreamer) {
+            if (sseStreamer && !isStructuredOutput) {
                 sseStreamer.streamTokenEvent(chatId, lastToolOutputString)
             }
 
@@ -1934,12 +2206,19 @@ class Agent_Agentflow implements INode {
             let newResponse: AIMessageChunk
 
             if (isStreamable) {
-                newResponse = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController)
+                newResponse = await this.handleStreamingResponse(
+                    sseStreamer,
+                    llmNodeInstance,
+                    messages,
+                    chatId,
+                    abortController,
+                    isStructuredOutput
+                )
             } else {
                 newResponse = await llmNodeInstance.invoke(messages, { signal: abortController?.signal })
 
                 // Stream non-streaming response if this is the last node
-                if (isLastNode && sseStreamer) {
+                if (isLastNode && sseStreamer && !isStructuredOutput) {
                     let responseContent = JSON.stringify(newResponse, null, 2)
                     if (typeof newResponse.content === 'string') {
                         responseContent = newResponse.content
@@ -1974,7 +2253,8 @@ class Agent_Agentflow implements INode {
                 llmNodeInstance,
                 isStreamable,
                 isLastNode,
-                iterationContext
+                iterationContext,
+                isStructuredOutput
             })
 
             // Merge results from recursive tool calls
@@ -2005,7 +2285,8 @@ class Agent_Agentflow implements INode {
             llmWithoutToolsBind,
             isStreamable,
             isLastNode,
-            iterationContext
+            iterationContext,
+            isStructuredOutput = false
         }: {
             humanInput: IHumanInput
             humanInputAction: Record<string, any> | undefined
@@ -2020,6 +2301,7 @@ class Agent_Agentflow implements INode {
             isStreamable: boolean
             isLastNode: boolean
             iterationContext: ICommonObject
+            isStructuredOutput?: boolean
         }): Promise<{
             response: AIMessageChunk
             usedTools: IUsedTool[]
@@ -2222,7 +2504,7 @@ class Agent_Agentflow implements INode {
             const lastToolOutput = usedTools[0]?.toolOutput || ''
             const lastToolOutputString = typeof lastToolOutput === 'string' ? lastToolOutput : JSON.stringify(lastToolOutput, null, 2)
 
-            if (sseStreamer) {
+            if (sseStreamer && !isStructuredOutput) {
                 sseStreamer.streamTokenEvent(chatId, lastToolOutputString)
             }
 
@@ -2253,12 +2535,19 @@ class Agent_Agentflow implements INode {
         }
 
         if (isStreamable) {
-            newResponse = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController)
+            newResponse = await this.handleStreamingResponse(
+                sseStreamer,
+                llmNodeInstance,
+                messages,
+                chatId,
+                abortController,
+                isStructuredOutput
+            )
         } else {
             newResponse = await llmNodeInstance.invoke(messages, { signal: abortController?.signal })
 
             // Stream non-streaming response if this is the last node
-            if (isLastNode && sseStreamer) {
+            if (isLastNode && sseStreamer && !isStructuredOutput) {
                 let responseContent = JSON.stringify(newResponse, null, 2)
                 if (typeof newResponse.content === 'string') {
                     responseContent = newResponse.content
@@ -2293,7 +2582,8 @@ class Agent_Agentflow implements INode {
             llmNodeInstance,
             isStreamable,
             isLastNode,
-            iterationContext
+            iterationContext,
+            isStructuredOutput
         })
 
         // Merge results from recursive tool calls
```
```diff
@@ -2308,190 +2598,6 @@ class Agent_Agentflow implements INode {
         return { response: newResponse, usedTools, sourceDocuments, artifacts, totalTokens, isWaitingForHumanInput }
     }
 
-    /**
-     * Extracts artifacts from response metadata (both annotations and built-in tools)
-     */
-    private async extractArtifactsFromResponse(
-        responseMetadata: any,
-        modelNodeData: INodeData,
-        options: ICommonObject
-    ): Promise<{ artifacts: any[]; fileAnnotations: any[] }> {
-        const artifacts: any[] = []
-        const fileAnnotations: any[] = []
-
-        if (!responseMetadata?.output || !Array.isArray(responseMetadata.output)) {
-            return { artifacts, fileAnnotations }
-        }
-
-        for (const outputItem of responseMetadata.output) {
-            // Handle container file citations from annotations
-            if (outputItem.type === 'message' && outputItem.content && Array.isArray(outputItem.content)) {
-                for (const contentItem of outputItem.content) {
-                    if (contentItem.annotations && Array.isArray(contentItem.annotations)) {
-                        for (const annotation of contentItem.annotations) {
-                            if (annotation.type === 'container_file_citation' && annotation.file_id && annotation.filename) {
-                                try {
-                                    // Download and store the file content
-                                    const downloadResult = await this.downloadContainerFile(
-                                        annotation.container_id,
-                                        annotation.file_id,
-                                        annotation.filename,
-                                        modelNodeData,
-                                        options
-                                    )
-
-                                    if (downloadResult) {
-                                        const fileType = this.getArtifactTypeFromFilename(annotation.filename)
-
-                                        if (fileType === 'png' || fileType === 'jpeg' || fileType === 'jpg') {
-                                            const artifact = {
-                                                type: fileType,
-                                                data: downloadResult.filePath
-                                            }
-
-                                            artifacts.push(artifact)
-                                        } else {
-                                            fileAnnotations.push({
-                                                filePath: downloadResult.filePath,
-                                                fileName: annotation.filename
-                                            })
-                                        }
-                                    }
-                                } catch (error) {
-                                    console.error('Error processing annotation:', error)
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-
-            // Handle built-in tool artifacts (like image generation)
-            if (outputItem.type === 'image_generation_call' && outputItem.result) {
-                try {
-                    const savedImageResult = await this.saveBase64Image(outputItem, options)
-                    if (savedImageResult) {
-                        // Replace the base64 result with the file path in the response metadata
-                        outputItem.result = savedImageResult.filePath
-
-                        // Create artifact in the same format as other image artifacts
-                        const fileType = this.getArtifactTypeFromFilename(savedImageResult.fileName)
-                        artifacts.push({
-                            type: fileType,
-                            data: savedImageResult.filePath
-                        })
-                    }
-                } catch (error) {
-                    console.error('Error processing image generation artifact:', error)
-                }
-            }
-        }
-
-        return { artifacts, fileAnnotations }
-    }
-
-    /**
-     * Downloads file content from container file citation
-     */
-    private async downloadContainerFile(
-        containerId: string,
-        fileId: string,
-        filename: string,
-        modelNodeData: INodeData,
-        options: ICommonObject
-    ): Promise<{ filePath: string; totalSize: number } | null> {
-        try {
-            const credentialData = await getCredentialData(modelNodeData.credential ?? '', options)
-            const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, modelNodeData)
-
-            if (!openAIApiKey) {
-                console.warn('No OpenAI API key available for downloading container file')
-                return null
-            }
-
-            // Download the file using OpenAI Container API
-            const response = await fetch(`https://api.openai.com/v1/containers/${containerId}/files/${fileId}/content`, {
-                method: 'GET',
-                headers: {
-                    Accept: '*/*',
-                    Authorization: `Bearer ${openAIApiKey}`
-                }
-            })
-
-            if (!response.ok) {
-                console.warn(
-                    `Failed to download container file ${fileId} from container ${containerId}: ${response.status} ${response.statusText}`
-                )
-                return null
-            }
-
-            // Extract the binary data from the Response object
-            const data = await response.arrayBuffer()
-            const dataBuffer = Buffer.from(data)
-            const mimeType = this.getMimeTypeFromFilename(filename)
-
-            // Store the file using the same storage utility as OpenAIAssistant
-            const { path, totalSize } = await addSingleFileToStorage(
-                mimeType,
-                dataBuffer,
-                filename,
-                options.orgId,
-                options.chatflowid,
-                options.chatId
-            )
-
-            return { filePath: path, totalSize }
-        } catch (error) {
-            console.error('Error downloading container file:', error)
-            return null
-        }
-    }
-
-    /**
-     * Gets MIME type from filename extension
-     */
-    private getMimeTypeFromFilename(filename: string): string {
-        const extension = filename.toLowerCase().split('.').pop()
-        const mimeTypes: { [key: string]: string } = {
-            png: 'image/png',
-            jpg: 'image/jpeg',
-            jpeg: 'image/jpeg',
-            gif: 'image/gif',
-            pdf: 'application/pdf',
-            txt: 'text/plain',
-            csv: 'text/csv',
-            json: 'application/json',
-            html: 'text/html',
-            xml: 'application/xml'
-        }
-        return mimeTypes[extension || ''] || 'application/octet-stream'
-    }
-
-    /**
-     * Gets artifact type from filename extension for UI rendering
-     */
-    private getArtifactTypeFromFilename(filename: string): string {
-        const extension = filename.toLowerCase().split('.').pop()
-        const artifactTypes: { [key: string]: string } = {
-            png: 'png',
-            jpg: 'jpeg',
-            jpeg: 'jpeg',
-            html: 'html',
-            htm: 'html',
-            md: 'markdown',
-            markdown: 'markdown',
-            json: 'json',
-            js: 'javascript',
-            javascript: 'javascript',
-            tex: 'latex',
-            latex: 'latex',
-            txt: 'text',
-            csv: 'text',
-            pdf: 'text'
-        }
-        return artifactTypes[extension || ''] || 'text'
-    }
-
     /**
      * Processes sandbox links in the response text and converts them to file annotations
      */
```
**Condition node (Condition_Agentflow)**

```diff
@@ -317,7 +317,7 @@ class Condition_Agentflow implements INode {
             }
         }
 
-        // If no condition is fullfilled, add isFulfilled to the ELSE condition
+        // If no condition is fulfilled, add isFulfilled to the ELSE condition
         const dummyElseConditionData = {
             type: 'string',
             value1: '',
```
**CustomFunction node (CustomFunction_Agentflow)**

```diff
@@ -60,7 +60,7 @@ class CustomFunction_Agentflow implements INode {
     constructor() {
         this.label = 'Custom Function'
         this.name = 'customFunctionAgentflow'
-        this.version = 1.0
+        this.version = 1.1
         this.type = 'CustomFunction'
         this.category = 'Agent Flows'
         this.description = 'Execute custom function'
@@ -107,8 +107,7 @@ class CustomFunction_Agentflow implements INode {
                     label: 'Key',
                     name: 'key',
                     type: 'asyncOptions',
-                    loadMethod: 'listRuntimeStateKeys',
-                    freeSolo: true
+                    loadMethod: 'listRuntimeStateKeys'
                 },
                 {
                     label: 'Value',
@@ -134,7 +133,7 @@ class CustomFunction_Agentflow implements INode {
 
     async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
         const javascriptFunction = nodeData.inputs?.customFunctionJavascriptFunction as string
-        const functionInputVariables = nodeData.inputs?.customFunctionInputVariables as ICustomFunctionInputVariables[]
+        const functionInputVariables = (nodeData.inputs?.customFunctionInputVariables as ICustomFunctionInputVariables[]) ?? []
         const _customFunctionUpdateState = nodeData.inputs?.customFunctionUpdateState
 
         const state = options.agentflowRuntime?.state as ICommonObject
@@ -147,11 +146,17 @@ class CustomFunction_Agentflow implements INode {
 
         const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
         const flow = {
+            input,
+            state,
             chatflowId: options.chatflowid,
             sessionId: options.sessionId,
             chatId: options.chatId,
-            input,
-            state
+            rawOutput: options.postProcessing?.rawOutput || '',
+            chatHistory: options.postProcessing?.chatHistory || [],
+            sourceDocuments: options.postProcessing?.sourceDocuments,
+            usedTools: options.postProcessing?.usedTools,
+            artifacts: options.postProcessing?.artifacts,
+            fileAnnotations: options.postProcessing?.fileAnnotations
        }
 
         // Create additional sandbox variables for custom function inputs
```
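The `flow` object handed to the custom-function sandbox now exposes post-processing data (raw output, chat history, tools, artifacts). A hedged example of what a custom function body could do with these fields; the property names follow the diff, but treat their availability as version-dependent:

```typescript
// Shape of what a custom function body could do with the new flow fields.
// ($flow is injected by the Flowise sandbox; typed loosely here.)
function customFunctionBody($flow: any): string {
    const summary = {
        chatId: $flow.chatId,
        sessionId: $flow.sessionId,
        answerPreview: String($flow.rawOutput || '').slice(0, 200),
        toolsUsed: ($flow.usedTools || []).map((t: any) => t.tool),
        turns: ($flow.chatHistory || []).length
    }
    return JSON.stringify(summary)
}
```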
**ExecuteFlow node (ExecuteFlow_Agentflow)**

```diff
@@ -30,7 +30,7 @@ class ExecuteFlow_Agentflow implements INode {
     constructor() {
         this.label = 'Execute Flow'
         this.name = 'executeFlowAgentflow'
-        this.version = 1.1
+        this.version = 1.2
         this.type = 'ExecuteFlow'
         this.category = 'Agent Flows'
         this.description = 'Execute another flow'
@@ -102,8 +102,7 @@ class ExecuteFlow_Agentflow implements INode {
                     label: 'Key',
                     name: 'key',
                     type: 'asyncOptions',
-                    loadMethod: 'listRuntimeStateKeys',
-                    freeSolo: true
+                    loadMethod: 'listRuntimeStateKeys'
                 },
                 {
                     label: 'Value',
```
@@ -241,8 +241,11 @@ class HumanInput_Agentflow implements INode {
         if (isStreamable) {
             const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
             for await (const chunk of await llmNodeInstance.stream(messages)) {
-                sseStreamer.streamTokenEvent(chatId, chunk.content.toString())
-                response = response.concat(chunk)
+                const content = typeof chunk === 'string' ? chunk : chunk.content.toString()
+                sseStreamer.streamTokenEvent(chatId, content)
+
+                const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
+                response = response.concat(messageChunk)
             }
             humanInputDescription = response.content as string
         } else {

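In recent LangChain releases, stream() may yield plain strings as well as AIMessageChunk objects, so the hunk above normalizes both shapes before streaming and concatenating. A minimal standalone sketch of the same guard (the sample values are illustrative):

    import { AIMessageChunk } from '@langchain/core/messages'

    // Normalize a streamed chunk that may be a plain string or an AIMessageChunk
    const toMessageChunk = (chunk: string | AIMessageChunk): AIMessageChunk =>
        typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk

    // Accumulate chunks regardless of their shape
    let response = new AIMessageChunk('')
    for (const raw of ['Hel', new AIMessageChunk('lo')] as Array<string | AIMessageChunk>) {
        response = response.concat(toMessageChunk(raw))
    }
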
@@ -2,17 +2,19 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ICommonObject, IMessage, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
 import { AIMessageChunk, BaseMessageLike, MessageContentText } from '@langchain/core/messages'
 import { DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt'
-import { z } from 'zod'
 import { AnalyticHandler } from '../../../src/handler'
-import { ILLMMessage, IStructuredOutput } from '../Interface.Agentflow'
+import { ILLMMessage } from '../Interface.Agentflow'
 import {
+    addImageArtifactsToMessages,
+    extractArtifactsFromResponse,
     getPastChatHistoryImageMessages,
     getUniqueImageMessages,
     processMessagesWithImages,
     replaceBase64ImagesWithFileReferences,
+    replaceInlineDataWithFileReferences,
     updateFlowState
 } from '../utils'
-import { processTemplateVariables } from '../../../src/utils'
+import { processTemplateVariables, configureStructuredOutput } from '../../../src/utils'
 import { flatten } from 'lodash'

 class LLM_Agentflow implements INode {
@@ -32,7 +34,7 @@ class LLM_Agentflow implements INode {
     constructor() {
         this.label = 'LLM'
         this.name = 'llmAgentflow'
-        this.version = 1.0
+        this.version = 1.1
         this.type = 'LLM'
         this.category = 'Agent Flows'
         this.description = 'Large language models to analyze user-provided inputs and generate responses'
@@ -288,8 +290,7 @@ class LLM_Agentflow implements INode {
                 label: 'Key',
                 name: 'key',
                 type: 'asyncOptions',
-                loadMethod: 'listRuntimeStateKeys',
-                freeSolo: true
+                loadMethod: 'listRuntimeStateKeys'
             },
             {
                 label: 'Value',
@@ -449,10 +450,16 @@ class LLM_Agentflow implements INode {
         }
         delete nodeData.inputs?.llmMessages

+        /**
+         * Add image artifacts from previous assistant responses as user messages
+         * Images are converted from FILE-STORAGE::<image_path> to base 64 image_url format
+         */
+        await addImageArtifactsToMessages(messages, options)
+
         // Configure structured output if specified
         const isStructuredOutput = _llmStructuredOutput && Array.isArray(_llmStructuredOutput) && _llmStructuredOutput.length > 0
         if (isStructuredOutput) {
-            llmNodeInstance = this.configureStructuredOutput(llmNodeInstance, _llmStructuredOutput)
+            llmNodeInstance = configureStructuredOutput(llmNodeInstance, _llmStructuredOutput)
         }

         // Initialize response and determine if streaming is possible
@@ -468,9 +475,11 @@ class LLM_Agentflow implements INode {
         // Track execution time
         const startTime = Date.now()

+        const sseStreamer: IServerSideEventStreamer | undefined = options.sseStreamer
+
         /*
          * Invoke LLM
          */
         if (isStreamable) {
             response = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController)
         } else {
@@ -495,6 +504,40 @@ class LLM_Agentflow implements INode {
         const endTime = Date.now()
         const timeDelta = endTime - startTime

+        // Extract artifacts and file annotations from response metadata
+        let artifacts: any[] = []
+        let fileAnnotations: any[] = []
+        if (response.response_metadata) {
+            const {
+                artifacts: extractedArtifacts,
+                fileAnnotations: extractedFileAnnotations,
+                savedInlineImages
+            } = await extractArtifactsFromResponse(response.response_metadata, newNodeData, options)
+
+            if (extractedArtifacts.length > 0) {
+                artifacts = extractedArtifacts
+
+                // Stream artifacts if this is the last node
+                if (isLastNode && sseStreamer) {
+                    sseStreamer.streamArtifactsEvent(chatId, artifacts)
+                }
+            }
+
+            if (extractedFileAnnotations.length > 0) {
+                fileAnnotations = extractedFileAnnotations
+
+                // Stream file annotations if this is the last node
+                if (isLastNode && sseStreamer) {
+                    sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
+                }
+            }
+
+            // Replace inlineData base64 with file references in the response
+            if (savedInlineImages && savedInlineImages.length > 0) {
+                replaceInlineDataWithFileReferences(response, savedInlineImages)
+            }
+        }
+
         // Update flow state if needed
         let newState = { ...state }
         if (_llmUpdateState && Array.isArray(_llmUpdateState) && _llmUpdateState.length > 0) {
@@ -514,10 +557,22 @@ class LLM_Agentflow implements INode {
             finalResponse = response.content.map((item: any) => item.text).join('\n')
         } else if (response.content && typeof response.content === 'string') {
             finalResponse = response.content
+        } else if (response.content === '') {
+            // Empty response content, this could happen when there is only image data
+            finalResponse = ''
         } else {
             finalResponse = JSON.stringify(response, null, 2)
         }
-        const output = this.prepareOutputObject(response, finalResponse, startTime, endTime, timeDelta, isStructuredOutput)
+        const output = this.prepareOutputObject(
+            response,
+            finalResponse,
+            startTime,
+            endTime,
+            timeDelta,
+            isStructuredOutput,
+            artifacts,
+            fileAnnotations
+        )

         // End analytics tracking
         if (analyticHandlers && llmIds) {
@@ -529,12 +584,23 @@ class LLM_Agentflow implements INode {
             this.sendStreamingEvents(options, chatId, response)
         }

+        // Stream file annotations if any were extracted
+        if (fileAnnotations.length > 0 && isLastNode && sseStreamer) {
+            sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
+        }
+
         // Process template variables in state
         newState = processTemplateVariables(newState, finalResponse)

+        /**
+         * Remove the temporarily added image artifact messages before storing
+         * This is to avoid storing the actual base64 data into database
+         */
+        const messagesToStore = messages.filter((msg: any) => !msg._isTemporaryImageMessage)
+
         // Replace the actual messages array with one that includes the file references for images instead of base64 data
         const messagesWithFileReferences = replaceBase64ImagesWithFileReferences(
-            messages,
+            messagesToStore,
             runtimeImageMessagesWithFileRef,
             pastImageMessagesWithFileRef
         )
@@ -585,7 +651,13 @@ class LLM_Agentflow implements INode {
                 {
                     role: returnRole,
                     content: finalResponse,
-                    name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id
+                    name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id,
+                    ...(((artifacts && artifacts.length > 0) || (fileAnnotations && fileAnnotations.length > 0)) && {
+                        additional_kwargs: {
+                            ...(artifacts && artifacts.length > 0 && { artifacts }),
+                            ...(fileAnnotations && fileAnnotations.length > 0 && { fileAnnotations })
+                        }
+                    })
                 }
             ]
         }
@@ -755,59 +827,6 @@ class LLM_Agentflow implements INode {
         }
     }

-    /**
-     * Configures structured output for the LLM
-     */
-    private configureStructuredOutput(llmNodeInstance: BaseChatModel, llmStructuredOutput: IStructuredOutput[]): BaseChatModel {
-        try {
-            const zodObj: ICommonObject = {}
-            for (const sch of llmStructuredOutput) {
-                if (sch.type === 'string') {
-                    zodObj[sch.key] = z.string().describe(sch.description || '')
-                } else if (sch.type === 'stringArray') {
-                    zodObj[sch.key] = z.array(z.string()).describe(sch.description || '')
-                } else if (sch.type === 'number') {
-                    zodObj[sch.key] = z.number().describe(sch.description || '')
-                } else if (sch.type === 'boolean') {
-                    zodObj[sch.key] = z.boolean().describe(sch.description || '')
-                } else if (sch.type === 'enum') {
-                    const enumValues = sch.enumValues?.split(',').map((item: string) => item.trim()) || []
-                    zodObj[sch.key] = z
-                        .enum(enumValues.length ? (enumValues as [string, ...string[]]) : ['default'])
-                        .describe(sch.description || '')
-                } else if (sch.type === 'jsonArray') {
-                    const jsonSchema = sch.jsonSchema
-                    if (jsonSchema) {
-                        try {
-                            // Parse the JSON schema
-                            const schemaObj = JSON.parse(jsonSchema)
-
-                            // Create a Zod schema from the JSON schema
-                            const itemSchema = this.createZodSchemaFromJSON(schemaObj)
-
-                            // Create an array schema of the item schema
-                            zodObj[sch.key] = z.array(itemSchema).describe(sch.description || '')
-                        } catch (err) {
-                            console.error(`Error parsing JSON schema for ${sch.key}:`, err)
-                            // Fallback to generic array of records
-                            zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '')
-                        }
-                    } else {
-                        // If no schema provided, use generic array of records
-                        zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '')
-                    }
-                }
-            }
-            const structuredOutput = z.object(zodObj)
-
-            // @ts-ignore
-            return llmNodeInstance.withStructuredOutput(structuredOutput)
-        } catch (exception) {
-            console.error(exception)
-            return llmNodeInstance
-        }
-    }
-
     /**
      * Handles streaming response from the LLM
      */
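The private helper removed here is not gone: per the import hunk at the top of this file, configureStructuredOutput is now shared from ../../../src/utils, so every Agentflow node uses one implementation. A minimal sketch of the core pattern that helper is built on, assuming a hypothetical two-field schema (answer, sources) in place of the node's structured-output config:

    import { z } from 'zod'
    import { BaseChatModel } from '@langchain/core/language_models/chat_models'

    // Bind a zod schema to a chat model so responses are parsed into that shape
    const withSchema = (llm: BaseChatModel) => {
        const schema = z.object({
            answer: z.string().describe('The answer to the question'), // hypothetical key
            sources: z.array(z.string()).describe('Source references') // hypothetical key
        })
        // @ts-ignore - the removed helper suppressed the same type mismatch
        return llm.withStructuredOutput(schema)
    }
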
@@ -824,16 +843,20 @@ class LLM_Agentflow implements INode {
             for await (const chunk of await llmNodeInstance.stream(messages, { signal: abortController?.signal })) {
                 if (sseStreamer) {
                     let content = ''
-                    if (Array.isArray(chunk.content) && chunk.content.length > 0) {
+
+                    if (typeof chunk === 'string') {
+                        content = chunk
+                    } else if (Array.isArray(chunk.content) && chunk.content.length > 0) {
                         const contents = chunk.content as MessageContentText[]
                         content = contents.map((item) => item.text).join('')
-                    } else {
+                    } else if (chunk.content) {
                         content = chunk.content.toString()
                     }
                     sseStreamer.streamTokenEvent(chatId, content)
                 }

-                response = response.concat(chunk)
+                const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
+                response = response.concat(messageChunk)
             }
         } catch (error) {
             console.error('Error during streaming:', error)
@@ -855,7 +878,9 @@ class LLM_Agentflow implements INode {
         startTime: number,
         endTime: number,
         timeDelta: number,
-        isStructuredOutput: boolean
+        isStructuredOutput: boolean,
+        artifacts: any[] = [],
+        fileAnnotations: any[] = []
     ): any {
         const output: any = {
             content: finalResponse,
@@ -874,6 +899,10 @@ class LLM_Agentflow implements INode {
             output.usageMetadata = response.usage_metadata
         }

+        if (response.response_metadata) {
+            output.responseMetadata = response.response_metadata
+        }
+
         if (isStructuredOutput && typeof response === 'object') {
             const structuredOutput = response as Record<string, any>
             for (const key in structuredOutput) {
@@ -883,6 +912,14 @@ class LLM_Agentflow implements INode {
             }
         }

+        if (artifacts && artifacts.length > 0) {
+            output.artifacts = flatten(artifacts)
+        }
+
+        if (fileAnnotations && fileAnnotations.length > 0) {
+            output.fileAnnotations = fileAnnotations
+        }
+
         return output
     }
@@ -907,107 +944,6 @@ class LLM_Agentflow implements INode {

         sseStreamer.streamEndEvent(chatId)
     }

-    /**
-     * Creates a Zod schema from a JSON schema object
-     * @param jsonSchema The JSON schema object
-     * @returns A Zod schema
-     */
-    private createZodSchemaFromJSON(jsonSchema: any): z.ZodTypeAny {
-        // If the schema is an object with properties, create an object schema
-        if (typeof jsonSchema === 'object' && jsonSchema !== null) {
-            const schemaObj: Record<string, z.ZodTypeAny> = {}
-
-            // Process each property in the schema
-            for (const [key, value] of Object.entries(jsonSchema)) {
-                if (value === null) {
-                    // Handle null values
-                    schemaObj[key] = z.null()
-                } else if (typeof value === 'object' && !Array.isArray(value)) {
-                    // Check if the property has a type definition
-                    if ('type' in value) {
-                        const type = value.type as string
-                        const description = ('description' in value ? (value.description as string) : '') || ''
-
-                        // Create the appropriate Zod type based on the type property
-                        if (type === 'string') {
-                            schemaObj[key] = z.string().describe(description)
-                        } else if (type === 'number') {
-                            schemaObj[key] = z.number().describe(description)
-                        } else if (type === 'boolean') {
-                            schemaObj[key] = z.boolean().describe(description)
-                        } else if (type === 'array') {
-                            // If it's an array type, check if items is defined
-                            if ('items' in value && value.items) {
-                                const itemSchema = this.createZodSchemaFromJSON(value.items)
-                                schemaObj[key] = z.array(itemSchema).describe(description)
-                            } else {
-                                // Default to array of any if items not specified
-                                schemaObj[key] = z.array(z.any()).describe(description)
-                            }
-                        } else if (type === 'object') {
-                            // If it's an object type, check if properties is defined
-                            if ('properties' in value && value.properties) {
-                                const nestedSchema = this.createZodSchemaFromJSON(value.properties)
-                                schemaObj[key] = nestedSchema.describe(description)
-                            } else {
-                                // Default to record of any if properties not specified
-                                schemaObj[key] = z.record(z.any()).describe(description)
-                            }
-                        } else {
-                            // Default to any for unknown types
-                            schemaObj[key] = z.any().describe(description)
-                        }
-
-                        // Check if the property is optional
-                        if ('optional' in value && value.optional === true) {
-                            schemaObj[key] = schemaObj[key].optional()
-                        }
-                    } else if (Array.isArray(value)) {
-                        // Array values without a type property
-                        if (value.length > 0) {
-                            // If the array has items, recursively create a schema for the first item
-                            const itemSchema = this.createZodSchemaFromJSON(value[0])
-                            schemaObj[key] = z.array(itemSchema)
-                        } else {
-                            // Empty array, allow any array
-                            schemaObj[key] = z.array(z.any())
-                        }
-                    } else {
-                        // It's a nested object without a type property, recursively create schema
-                        schemaObj[key] = this.createZodSchemaFromJSON(value)
-                    }
-                } else if (Array.isArray(value)) {
-                    // Array values
-                    if (value.length > 0) {
-                        // If the array has items, recursively create a schema for the first item
-                        const itemSchema = this.createZodSchemaFromJSON(value[0])
-                        schemaObj[key] = z.array(itemSchema)
-                    } else {
-                        // Empty array, allow any array
-                        schemaObj[key] = z.array(z.any())
-                    }
-                } else {
-                    // For primitive values (which shouldn't be in the schema directly)
-                    // Use the corresponding Zod type
-                    if (typeof value === 'string') {
-                        schemaObj[key] = z.string()
-                    } else if (typeof value === 'number') {
-                        schemaObj[key] = z.number()
-                    } else if (typeof value === 'boolean') {
-                        schemaObj[key] = z.boolean()
-                    } else {
-                        schemaObj[key] = z.any()
-                    }
-                }
-            }
-
-            return z.object(schemaObj)
-        }
-
-        // Fallback to any for unknown types
-        return z.any()
-    }
 }

 module.exports = { nodeClass: LLM_Agentflow }

@@ -20,7 +20,7 @@ class Loop_Agentflow implements INode {
     constructor() {
         this.label = 'Loop'
         this.name = 'loopAgentflow'
-        this.version = 1.1
+        this.version = 1.2
        this.type = 'Loop'
         this.category = 'Agent Flows'
         this.description = 'Loop back to a previous node'
@@ -64,8 +64,7 @@ class Loop_Agentflow implements INode {
                 label: 'Key',
                 name: 'key',
                 type: 'asyncOptions',
-                loadMethod: 'listRuntimeStateKeys',
-                freeSolo: true
+                loadMethod: 'listRuntimeStateKeys'
             },
             {
                 label: 'Value',

@@ -36,7 +36,7 @@ class Retriever_Agentflow implements INode {
     constructor() {
         this.label = 'Retriever'
         this.name = 'retrieverAgentflow'
-        this.version = 1.0
+        this.version = 1.1
         this.type = 'Retriever'
         this.category = 'Agent Flows'
         this.description = 'Retrieve information from vector database'
@@ -87,8 +87,7 @@ class Retriever_Agentflow implements INode {
                 label: 'Key',
                 name: 'key',
                 type: 'asyncOptions',
-                loadMethod: 'listRuntimeStateKeys',
-                freeSolo: true
+                loadMethod: 'listRuntimeStateKeys'
             },
             {
                 label: 'Value',

@@ -29,7 +29,7 @@ class Tool_Agentflow implements INode {
     constructor() {
         this.label = 'Tool'
         this.name = 'toolAgentflow'
-        this.version = 1.1
+        this.version = 1.2
         this.type = 'Tool'
         this.category = 'Agent Flows'
         this.description = 'Tools allow LLM to interact with external systems'
@@ -80,8 +80,7 @@ class Tool_Agentflow implements INode {
                 label: 'Key',
                 name: 'key',
                 type: 'asyncOptions',
-                loadMethod: 'listRuntimeStateKeys',
-                freeSolo: true
+                loadMethod: 'listRuntimeStateKeys'
             },
             {
                 label: 'Value',

@@ -1,10 +1,11 @@
-import { BaseMessage, MessageContentImageUrl } from '@langchain/core/messages'
+import { BaseMessage, MessageContentImageUrl, AIMessageChunk } from '@langchain/core/messages'
 import { getImageUploads } from '../../src/multiModalUtils'
-import { getFileFromStorage } from '../../src/storageUtils'
-import { ICommonObject, IFileUpload } from '../../src/Interface'
+import { addSingleFileToStorage, getFileFromStorage } from '../../src/storageUtils'
+import { ICommonObject, IFileUpload, INodeData } from '../../src/Interface'
 import { BaseMessageLike } from '@langchain/core/messages'
 import { IFlowState } from './Interface.Agentflow'
-import { handleEscapeCharacters, mapMimeTypeToInputField } from '../../src/utils'
+import { getCredentialData, getCredentialParam, handleEscapeCharacters, mapMimeTypeToInputField } from '../../src/utils'
+import fetch from 'node-fetch'

 export const addImagesToMessages = async (
     options: ICommonObject,
@@ -18,7 +19,8 @@ export const addImagesToMessages = async (
     for (const upload of imageUploads) {
         let bf = upload.data
         if (upload.type == 'stored-file') {
-            const contents = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId)
+            const fileName = upload.name.replace(/^FILE-STORAGE::/, '')
+            const contents = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)
             // as the image is stored in the server, read the file and convert it to base64
             bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64')

@@ -89,8 +91,9 @@ export const processMessagesWithImages = async (
                 if (item.type === 'stored-file' && item.name && item.mime.startsWith('image/')) {
                     hasImageReferences = true
                     try {
+                        const fileName = item.name.replace(/^FILE-STORAGE::/, '')
                         // Get file contents from storage
-                        const contents = await getFileFromStorage(item.name, options.orgId, options.chatflowid, options.chatId)
+                        const contents = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)

                         // Create base64 data URL
                         const base64Data = 'data:' + item.mime + ';base64,' + contents.toString('base64')
@@ -322,7 +325,8 @@ export const getPastChatHistoryImageMessages = async (
             const imageContents: MessageContentImageUrl[] = []
             for (const upload of uploads) {
                 if (upload.type === 'stored-file' && upload.mime.startsWith('image/')) {
-                    const fileData = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId)
+                    const fileName = upload.name.replace(/^FILE-STORAGE::/, '')
+                    const fileData = await getFileFromStorage(fileName, options.orgId, options.chatflowid, options.chatId)
                     // as the image is stored in the server, read the file and convert it to base64
                     const bf = 'data:' + upload.mime + ';base64,' + fileData.toString('base64')
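All three hunks in this file apply the same fix: stored-file names can arrive with a FILE-STORAGE:: prefix, which must be stripped before the bare name is passed to getFileFromStorage. For illustration (hypothetical file name):

    const uploadName = 'FILE-STORAGE::generated_image_123.png'
    const fileName = uploadName.replace(/^FILE-STORAGE::/, '')
    // fileName === 'generated_image_123.png'
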
@@ -456,6 +460,437 @@ export const getPastChatHistoryImageMessages = async (
     }
 }

+/**
+ * Gets MIME type from filename extension
+ */
+export const getMimeTypeFromFilename = (filename: string): string => {
+    const extension = filename.toLowerCase().split('.').pop()
+    const mimeTypes: { [key: string]: string } = {
+        png: 'image/png',
+        jpg: 'image/jpeg',
+        jpeg: 'image/jpeg',
+        gif: 'image/gif',
+        pdf: 'application/pdf',
+        txt: 'text/plain',
+        csv: 'text/csv',
+        json: 'application/json',
+        html: 'text/html',
+        xml: 'application/xml'
+    }
+    return mimeTypes[extension || ''] || 'application/octet-stream'
+}
+
+/**
+ * Gets artifact type from filename extension for UI rendering
+ */
+export const getArtifactTypeFromFilename = (filename: string): string => {
+    const extension = filename.toLowerCase().split('.').pop()
+    const artifactTypes: { [key: string]: string } = {
+        png: 'png',
+        jpg: 'jpeg',
+        jpeg: 'jpeg',
+        html: 'html',
+        htm: 'html',
+        md: 'markdown',
+        markdown: 'markdown',
+        json: 'json',
+        js: 'javascript',
+        javascript: 'javascript',
+        tex: 'latex',
+        latex: 'latex',
+        txt: 'text',
+        csv: 'text',
+        pdf: 'text'
+    }
+    return artifactTypes[extension || ''] || 'text'
+}
+
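A quick illustration of the two lookup helpers above; note that non-image types such as pdf and csv deliberately map to the text artifact type for UI rendering:

    getMimeTypeFromFilename('report.pdf')     // 'application/pdf'
    getMimeTypeFromFilename('archive.gz')     // 'application/octet-stream' (unknown extension)
    getArtifactTypeFromFilename('chart.png')  // 'png'
    getArtifactTypeFromFilename('report.pdf') // 'text'
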
+/**
+ * Saves base64 image data to storage and returns file information
+ */
+export const saveBase64Image = async (
+    outputItem: any,
+    options: ICommonObject
+): Promise<{ filePath: string; fileName: string; totalSize: number } | null> => {
+    try {
+        if (!outputItem.result) {
+            return null
+        }
+
+        // Extract base64 data and create buffer
+        const base64Data = outputItem.result
+        const imageBuffer = Buffer.from(base64Data, 'base64')
+
+        // Determine file extension and MIME type
+        const outputFormat = outputItem.output_format || 'png'
+        const fileName = `generated_image_${outputItem.id || Date.now()}.${outputFormat}`
+        const mimeType = outputFormat === 'png' ? 'image/png' : 'image/jpeg'
+
+        // Save the image using the existing storage utility
+        const { path, totalSize } = await addSingleFileToStorage(
+            mimeType,
+            imageBuffer,
+            fileName,
+            options.orgId,
+            options.chatflowid,
+            options.chatId
+        )
+
+        return { filePath: path, fileName, totalSize }
+    } catch (error) {
+        console.error('Error saving base64 image:', error)
+        return null
+    }
+}
+
+/**
+ * Saves Gemini inline image data to storage and returns file information
+ */
+export const saveGeminiInlineImage = async (
+    inlineItem: any,
+    options: ICommonObject
+): Promise<{ filePath: string; fileName: string; totalSize: number } | null> => {
+    try {
+        if (!inlineItem.data || !inlineItem.mimeType) {
+            return null
+        }
+
+        // Extract base64 data and create buffer
+        const base64Data = inlineItem.data
+        const imageBuffer = Buffer.from(base64Data, 'base64')
+
+        // Determine file extension from MIME type
+        const mimeType = inlineItem.mimeType
+        let extension = 'png'
+        if (mimeType.includes('jpeg') || mimeType.includes('jpg')) {
+            extension = 'jpg'
+        } else if (mimeType.includes('png')) {
+            extension = 'png'
+        } else if (mimeType.includes('gif')) {
+            extension = 'gif'
+        } else if (mimeType.includes('webp')) {
+            extension = 'webp'
+        }
+
+        const fileName = `gemini_generated_image_${Date.now()}.${extension}`
+
+        // Save the image using the existing storage utility
+        const { path, totalSize } = await addSingleFileToStorage(
+            mimeType,
+            imageBuffer,
+            fileName,
+            options.orgId,
+            options.chatflowid,
+            options.chatId
+        )
+
+        return { filePath: path, fileName, totalSize }
+    } catch (error) {
+        console.error('Error saving Gemini inline image:', error)
+        return null
+    }
+}
+
+/**
+ * Downloads file content from container file citation
+ */
+export const downloadContainerFile = async (
+    containerId: string,
+    fileId: string,
+    filename: string,
+    modelNodeData: INodeData,
+    options: ICommonObject
+): Promise<{ filePath: string; totalSize: number } | null> => {
+    try {
+        const credentialData = await getCredentialData(modelNodeData.credential ?? '', options)
+        const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, modelNodeData)
+
+        if (!openAIApiKey) {
+            console.warn('No OpenAI API key available for downloading container file')
+            return null
+        }
+
+        // Download the file using OpenAI Container API
+        const response = await fetch(`https://api.openai.com/v1/containers/${containerId}/files/${fileId}/content`, {
+            method: 'GET',
+            headers: {
+                Accept: '*/*',
+                Authorization: `Bearer ${openAIApiKey}`
+            }
+        })
+
+        if (!response.ok) {
+            console.warn(
+                `Failed to download container file ${fileId} from container ${containerId}: ${response.status} ${response.statusText}`
+            )
+            return null
+        }
+
+        // Extract the binary data from the Response object
+        const data = await response.arrayBuffer()
+        const dataBuffer = Buffer.from(data)
+        const mimeType = getMimeTypeFromFilename(filename)
+
+        // Store the file using the same storage utility as OpenAIAssistant
+        const { path, totalSize } = await addSingleFileToStorage(
+            mimeType,
+            dataBuffer,
+            filename,
+            options.orgId,
+            options.chatflowid,
+            options.chatId
+        )
+
+        return { filePath: path, totalSize }
+    } catch (error) {
+        console.error('Error downloading container file:', error)
+        return null
+    }
+}
+
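A hedged usage sketch of downloadContainerFile, to be run inside an async context; the container and file IDs are placeholders and would normally come from a container_file_citation annotation (see extractArtifactsFromResponse below):

    // Hypothetical IDs for illustration only
    const result = await downloadContainerFile(
        'cntr_abc123',  // containerId (placeholder)
        'cfile_def456', // fileId (placeholder)
        'analysis.csv', // filename, used to infer the MIME type
        nodeData,       // INodeData carrying the OpenAI credential
        options         // ICommonObject with orgId, chatflowid and chatId
    )
    if (result) {
        console.log(`Stored at ${result.filePath} (${result.totalSize} bytes)`)
    }
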
+/**
+ * Replace inlineData base64 with file references in the response content
+ */
+export const replaceInlineDataWithFileReferences = (
+    response: AIMessageChunk,
+    savedInlineImages: Array<{ filePath: string; fileName: string; mimeType: string }>
+): void => {
+    // Check if content is an array
+    if (!Array.isArray(response.content)) {
+        return
+    }
+
+    // Replace base64 data with file references in response content
+    let savedImageIndex = 0
+    for (let i = 0; i < response.content.length; i++) {
+        const contentItem = response.content[i]
+        if (
+            typeof contentItem === 'object' &&
+            contentItem.type === 'inlineData' &&
+            contentItem.inlineData &&
+            savedImageIndex < savedInlineImages.length
+        ) {
+            const savedImage = savedInlineImages[savedImageIndex]
+            // Replace with file reference
+            response.content[i] = {
+                type: 'stored-file',
+                name: savedImage.fileName,
+                mime: savedImage.mimeType,
+                path: savedImage.filePath
+            }
+            savedImageIndex++
+        }
+    }
+
+    // Clear the inlineData from response_metadata to avoid duplication
+    if (response.response_metadata?.inlineData) {
+        delete response.response_metadata.inlineData
+    }
+}
+
+/**
+ * Extracts artifacts from response metadata (both annotations and built-in tools)
+ */
+export const extractArtifactsFromResponse = async (
+    responseMetadata: any,
+    modelNodeData: INodeData,
+    options: ICommonObject
+): Promise<{
+    artifacts: any[]
+    fileAnnotations: any[]
+    savedInlineImages?: Array<{ filePath: string; fileName: string; mimeType: string }>
+}> => {
+    const artifacts: any[] = []
+    const fileAnnotations: any[] = []
+    const savedInlineImages: Array<{ filePath: string; fileName: string; mimeType: string }> = []
+
+    // Handle Gemini inline data (image generation)
+    if (responseMetadata?.inlineData && Array.isArray(responseMetadata.inlineData)) {
+        for (const inlineItem of responseMetadata.inlineData) {
+            if (inlineItem.type === 'gemini_inline_data' && inlineItem.data && inlineItem.mimeType) {
+                try {
+                    const savedImageResult = await saveGeminiInlineImage(inlineItem, options)
+                    if (savedImageResult) {
+                        // Create artifact in the same format as other image artifacts
+                        const fileType = getArtifactTypeFromFilename(savedImageResult.fileName)
+                        artifacts.push({
+                            type: fileType,
+                            data: savedImageResult.filePath
+                        })
+
+                        // Track saved image for replacing base64 data in content
+                        savedInlineImages.push({
+                            filePath: savedImageResult.filePath,
+                            fileName: savedImageResult.fileName,
+                            mimeType: inlineItem.mimeType
+                        })
+                    }
+                } catch (error) {
+                    console.error('Error processing Gemini inline image artifact:', error)
+                }
+            }
+        }
+    }
+
+    if (!responseMetadata?.output || !Array.isArray(responseMetadata.output)) {
+        return { artifacts, fileAnnotations, savedInlineImages: savedInlineImages.length > 0 ? savedInlineImages : undefined }
+    }
+
+    for (const outputItem of responseMetadata.output) {
+        // Handle container file citations from annotations
+        if (outputItem.type === 'message' && outputItem.content && Array.isArray(outputItem.content)) {
+            for (const contentItem of outputItem.content) {
+                if (contentItem.annotations && Array.isArray(contentItem.annotations)) {
+                    for (const annotation of contentItem.annotations) {
+                        if (annotation.type === 'container_file_citation' && annotation.file_id && annotation.filename) {
+                            try {
+                                // Download and store the file content
+                                const downloadResult = await downloadContainerFile(
+                                    annotation.container_id,
+                                    annotation.file_id,
+                                    annotation.filename,
+                                    modelNodeData,
+                                    options
+                                )
+
+                                if (downloadResult) {
+                                    const fileType = getArtifactTypeFromFilename(annotation.filename)
+
+                                    if (fileType === 'png' || fileType === 'jpeg' || fileType === 'jpg') {
+                                        const artifact = {
+                                            type: fileType,
+                                            data: downloadResult.filePath
+                                        }
+
+                                        artifacts.push(artifact)
+                                    } else {
+                                        fileAnnotations.push({
+                                            filePath: downloadResult.filePath,
+                                            fileName: annotation.filename
+                                        })
+                                    }
+                                }
+                            } catch (error) {
+                                console.error('Error processing annotation:', error)
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        // Handle built-in tool artifacts (like image generation)
+        if (outputItem.type === 'image_generation_call' && outputItem.result) {
+            try {
+                const savedImageResult = await saveBase64Image(outputItem, options)
+                if (savedImageResult) {
+                    // Replace the base64 result with the file path in the response metadata
+                    outputItem.result = savedImageResult.filePath
+
+                    // Create artifact in the same format as other image artifacts
+                    const fileType = getArtifactTypeFromFilename(savedImageResult.fileName)
+                    artifacts.push({
+                        type: fileType,
+                        data: savedImageResult.filePath
+                    })
+                }
+            } catch (error) {
+                console.error('Error processing image generation artifact:', error)
+            }
+        }
+    }
+
+    return { artifacts, fileAnnotations, savedInlineImages: savedInlineImages.length > 0 ? savedInlineImages : undefined }
+}
+
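For reference, sketches of the two metadata shapes this function consumes, as implied by its branches (all field values are illustrative):

    // Gemini image generation: inline data surfaced by the chat-model wrapper
    const geminiMetadata = {
        inlineData: [{ type: 'gemini_inline_data', mimeType: 'image/png', data: '<base64>' }]
    }

    // OpenAI Responses-style output: annotated messages and built-in tool calls
    const openAIMetadata = {
        output: [
            {
                type: 'message',
                content: [
                    {
                        annotations: [
                            { type: 'container_file_citation', container_id: 'cntr_abc', file_id: 'cfile_def', filename: 'chart.png' }
                        ]
                    }
                ]
            },
            { type: 'image_generation_call', id: 'ig_123', result: '<base64>', output_format: 'png' }
        ]
    }
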
+/**
+ * Add image artifacts from previous assistant messages as user messages
+ * This allows the LLM to see and reference the generated images in the conversation
+ * Messages are marked with a special flag for later removal
+ */
+export const addImageArtifactsToMessages = async (messages: BaseMessageLike[], options: ICommonObject): Promise<void> => {
+    const imageExtensions = ['png', 'jpg', 'jpeg', 'gif', 'webp']
+    const messagesToInsert: Array<{ index: number; message: any }> = []
+
+    // Iterate through messages to find assistant messages with image artifacts
+    for (let i = 0; i < messages.length; i++) {
+        const message = messages[i] as any
+
+        // Check if this is an assistant message with artifacts
+        if (
+            (message.role === 'assistant' || message.role === 'ai') &&
+            message.additional_kwargs?.artifacts &&
+            Array.isArray(message.additional_kwargs.artifacts)
+        ) {
+            const artifacts = message.additional_kwargs.artifacts
+            const imageArtifacts: Array<{ type: string; name: string; mime: string }> = []
+
+            // Extract image artifacts
+            for (const artifact of artifacts) {
+                if (artifact.type && artifact.data) {
+                    // Check if this is an image artifact by file type
+                    if (imageExtensions.includes(artifact.type.toLowerCase())) {
+                        // Extract filename from the file path
+                        const fileName = artifact.data.split('/').pop() || artifact.data
+                        const mimeType = `image/${artifact.type.toLowerCase()}`
+
+                        imageArtifacts.push({
+                            type: 'stored-file',
+                            name: fileName,
+                            mime: mimeType
+                        })
+                    }
+                }
+            }
+
+            // If we found image artifacts, prepare to insert a user message after this assistant message
+            if (imageArtifacts.length > 0) {
+                // Check if the next message already contains these image artifacts to avoid duplicates
+                const nextMessage = messages[i + 1] as any
+                const shouldInsert =
+                    !nextMessage ||
+                    nextMessage.role !== 'user' ||
+                    !Array.isArray(nextMessage.content) ||
+                    !nextMessage.content.some(
+                        (item: any) =>
+                            (item.type === 'stored-file' || item.type === 'image_url') &&
+                            imageArtifacts.some((artifact) => {
+                                // Compare with and without FILE-STORAGE:: prefix
+                                const artifactName = artifact.name.replace('FILE-STORAGE::', '')
+                                const itemName = item.name?.replace('FILE-STORAGE::', '') || ''
+                                return artifactName === itemName
+                            })
+                    )
+
+                if (shouldInsert) {
+                    messagesToInsert.push({
+                        index: i + 1,
+                        message: {
+                            role: 'user',
+                            content: imageArtifacts,
+                            _isTemporaryImageMessage: true // Mark for later removal
+                        }
+                    })
+                }
+            }
+        }
+    }
+
+    // Insert messages in reverse order to maintain correct indices
+    for (let i = messagesToInsert.length - 1; i >= 0; i--) {
+        const { index, message } = messagesToInsert[i]
+        messages.splice(index, 0, message)
+    }
+
+    // Convert stored-file references to base64 image_url format
+    if (messagesToInsert.length > 0) {
+        const { updatedMessages } = await processMessagesWithImages(messages, options)
+        // Replace the messages array content with the updated messages
+        messages.length = 0
+        messages.push(...updatedMessages)
+    }
+}
+
 /**
  * Updates the flow state with new values
  */

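The _isTemporaryImageMessage flag set here pairs with the filter added in LLM.ts earlier in this diff, so the injected base64 image messages are visible to the model during the call but never persisted:

    // From the LLM.ts hunk above: temporary image messages are dropped before storage
    const messagesToStore = messages.filter((msg: any) => !msg._isTemporaryImageMessage)
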
@@ -5,7 +5,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
 import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts'
 import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
-import { getBaseClasses, transformBracesWithColon } from '../../../src/utils'
+import { getBaseClasses, transformBracesWithColon, convertChatHistoryToText, convertBaseMessagetoIMessage } from '../../../src/utils'
 import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
 import {
     FlowiseMemory,
@@ -23,8 +23,10 @@ import { Moderation, checkInputs, streamResponse } from '../../moderation/Moderation'
 import { formatResponse } from '../../outputparsers/OutputParserHelpers'
 import type { Document } from '@langchain/core/documents'
 import { BaseRetriever } from '@langchain/core/retrievers'
-import { RESPONSE_TEMPLATE } from '../../chains/ConversationalRetrievalQAChain/prompts'
+import { RESPONSE_TEMPLATE, REPHRASE_TEMPLATE } from '../../chains/ConversationalRetrievalQAChain/prompts'
 import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils'
+import { StringOutputParser } from '@langchain/core/output_parsers'
+import { Tool } from '@langchain/core/tools'

 class ConversationalRetrievalToolAgent_Agents implements INode {
     label: string
@@ -42,7 +44,7 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
     constructor(fields?: { sessionId?: string }) {
         this.label = 'Conversational Retrieval Tool Agent'
         this.name = 'conversationalRetrievalToolAgent'
-        this.author = 'niztal(falkor)'
+        this.author = 'niztal(falkor) and nikitas-novatix'
         this.version = 1.0
         this.type = 'AgentExecutor'
         this.category = 'Agents'
@@ -79,6 +81,26 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
                 optional: true,
                 default: RESPONSE_TEMPLATE
             },
+            {
+                label: 'Rephrase Prompt',
+                name: 'rephrasePrompt',
+                type: 'string',
+                description: 'Using previous chat history, rephrase question into a standalone question',
+                warning: 'Prompt must include input variables: {chat_history} and {question}',
+                rows: 4,
+                additionalParams: true,
+                optional: true,
+                default: REPHRASE_TEMPLATE
+            },
+            {
+                label: 'Rephrase Model',
+                name: 'rephraseModel',
+                type: 'BaseChatModel',
+                description:
+                    'Optional: Use a different (faster/cheaper) model for rephrasing. If not specified, uses the main Tool Calling Chat Model.',
+                optional: true,
+                additionalParams: true
+            },
             {
                 label: 'Input Moderation',
                 description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',
@@ -103,8 +125,9 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
         this.sessionId = fields?.sessionId
     }

-    async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
-        return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
+    // The agent will be prepared in run() with the correct user message - it needs the actual runtime input for rephrasing
+    async init(_nodeData: INodeData, _input: string, _options: ICommonObject): Promise<any> {
+        return null
     }

     async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
@@ -148,6 +171,23 @@ class ConversationalRetrievalToolAgent_Agents implements INode {
                     sseStreamer.streamUsedToolsEvent(chatId, res.usedTools)
                     usedTools = res.usedTools
                 }
+
+                // If the tool is set to returnDirect, stream the output to the client
+                if (res.usedTools && res.usedTools.length) {
+                    let inputTools = nodeData.inputs?.tools
+                    inputTools = flatten(inputTools)
+                    for (const tool of res.usedTools) {
+                        const inputTool = inputTools.find((inputTool: Tool) => inputTool.name === tool.tool)
+                        if (inputTool && (inputTool as any).returnDirect && shouldStreamResponse) {
+                            sseStreamer.streamTokenEvent(chatId, tool.toolOutput)
+                            // Prevent CustomChainHandler from streaming the same output again
+                            if (res.output === tool.toolOutput) {
+                                res.output = ''
+                            }
+                        }
+                    }
+                }
+                // The CustomChainHandler will send the stream end event
             } else {
                 res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
                 if (res.sourceDocuments) {
@@ -210,9 +250,11 @@ const prepareAgent = async (
     flowObj: { sessionId?: string; chatId?: string; input?: string }
 ) => {
     const model = nodeData.inputs?.model as BaseChatModel
+    const rephraseModel = (nodeData.inputs?.rephraseModel as BaseChatModel) || model // Use main model if not specified
     const maxIterations = nodeData.inputs?.maxIterations as string
     const memory = nodeData.inputs?.memory as FlowiseMemory
     let systemMessage = nodeData.inputs?.systemMessage as string
+    let rephrasePrompt = nodeData.inputs?.rephrasePrompt as string
     let tools = nodeData.inputs?.tools
     tools = flatten(tools)
     const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
@@ -220,6 +262,9 @@ const prepareAgent = async (
     const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever

     systemMessage = transformBracesWithColon(systemMessage)
+    if (rephrasePrompt) {
+        rephrasePrompt = transformBracesWithColon(rephrasePrompt)
+    }

     const prompt = ChatPromptTemplate.fromMessages([
         ['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],
@@ -263,6 +308,37 @@ const prepareAgent = async (

     const modelWithTools = model.bindTools(tools)

+    // Function to get standalone question (either rephrased or original)
+    const getStandaloneQuestion = async (input: string): Promise<string> => {
+        // If no rephrase prompt, return the original input
+        if (!rephrasePrompt) {
+            return input
+        }
+
+        // Get chat history (use empty string if none)
+        const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[]
+        const iMessages = convertBaseMessagetoIMessage(messages)
+        const chatHistoryString = convertChatHistoryToText(iMessages)
+
+        // Always rephrase to normalize/expand user queries for better retrieval
+        try {
+            const CONDENSE_QUESTION_PROMPT = PromptTemplate.fromTemplate(rephrasePrompt)
+            const condenseQuestionChain = RunnableSequence.from([CONDENSE_QUESTION_PROMPT, rephraseModel, new StringOutputParser()])
+            const res = await condenseQuestionChain.invoke({
+                question: input,
+                chat_history: chatHistoryString
+            })
+            return res
+        } catch (error) {
+            console.error('Error rephrasing question:', error)
+            // On error, fall back to original input
+            return input
+        }
+    }
+
+    // Get standalone question before creating runnable
+    const standaloneQuestion = await getStandaloneQuestion(flowObj?.input || '')
+
     const runnableAgent = RunnableSequence.from([
         {
             [inputKey]: (i: { input: string; steps: ToolsAgentStep[] }) => i.input,
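The condense-question chain added above follows the standard LangChain pattern of prompt template, model, then string parser. A standalone sketch, to be run inside an async context; the inline template and ChatOpenAI stand in for rephrasePrompt and rephraseModel:

    import { PromptTemplate } from '@langchain/core/prompts'
    import { RunnableSequence } from '@langchain/core/runnables'
    import { StringOutputParser } from '@langchain/core/output_parsers'
    import { ChatOpenAI } from '@langchain/openai'

    const prompt = PromptTemplate.fromTemplate(
        'Given this conversation:\n{chat_history}\nRephrase "{question}" as a standalone question.'
    )
    // Any BaseChatModel works here; ChatOpenAI is only an example stand-in
    const chain = RunnableSequence.from([prompt, new ChatOpenAI({ model: 'gpt-4o-mini' }), new StringOutputParser()])
    const standalone = await chain.invoke({
        question: 'what about pricing?',
        chat_history: 'User asked about Flowise deployment options.'
    })
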
@@ -272,7 +348,9 @@ const prepareAgent = async (
             return messages ?? []
         },
         context: async (i: { input: string; chatHistory?: string }) => {
-            const relevantDocs = await vectorStoreRetriever.invoke(i.input)
+            // Use the standalone question (rephrased or original) for retrieval
+            const retrievalQuery = standaloneQuestion || i.input
+            const relevantDocs = await vectorStoreRetriever.invoke(retrievalQuery)
             const formattedDocs = formatDocs(relevantDocs)
             return formattedDocs
         }
@@ -295,4 +373,6 @@ const prepareAgent = async (
     return executor
 }

-module.exports = { nodeClass: ConversationalRetrievalToolAgent_Agents }
+module.exports = {
+    nodeClass: ConversationalRetrievalToolAgent_Agents
+}

@@ -578,7 +578,7 @@ class OpenAIAssistant_Agents implements INode {
                                     toolOutput
                                 })
                             } catch (e) {
-                                await analyticHandlers.onToolEnd(toolIds, e)
+                                await analyticHandlers.onToolError(toolIds, e)
                                 console.error('Error executing tool', e)
                                 throw new Error(
                                     `Error executing tool. Tool: ${tool.name}. Thread ID: ${threadId}. Run ID: ${runThreadId}`
@@ -703,7 +703,7 @@ class OpenAIAssistant_Agents implements INode {
                                     toolOutput
                                 })
                             } catch (e) {
-                                await analyticHandlers.onToolEnd(toolIds, e)
+                                await analyticHandlers.onToolError(toolIds, e)
                                 console.error('Error executing tool', e)
                                 clearInterval(timeout)
                                 reject(
@@ -1096,7 +1096,7 @@ async function handleToolSubmission(params: ToolSubmissionParams): Promise<ToolS
                     toolOutput
                 })
             } catch (e) {
-                await analyticHandlers.onToolEnd(toolIds, e)
+                await analyticHandlers.onToolError(toolIds, e)
                 console.error('Error executing tool', e)
                 throw new Error(`Error executing tool. Tool: ${tool.name}. Thread ID: ${threadId}. Run ID: ${runThreadId}`)
             }

@@ -607,7 +607,12 @@ export class LangchainChatGoogleGenerativeAI
     private client: GenerativeModel

     get _isMultimodalModel() {
-        return this.model.includes('vision') || this.model.startsWith('gemini-1.5') || this.model.startsWith('gemini-2')
+        return (
+            this.model.includes('vision') ||
+            this.model.startsWith('gemini-1.5') ||
+            this.model.startsWith('gemini-2') ||
+            this.model.startsWith('gemini-3')
+        )
     }

     constructor(fields: GoogleGenerativeAIChatInput) {

@@ -452,6 +452,7 @@ export function mapGenerateContentResultToChatResult(
     const [candidate] = response.candidates
     const { content: candidateContent, ...generationInfo } = candidate
     let content: MessageContent | undefined
+    const inlineDataItems: any[] = []

     if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length === 1 && candidateContent.parts[0].text) {
         content = candidateContent.parts[0].text
@@ -472,6 +473,18 @@ export function mapGenerateContentResultToChatResult(
                     type: 'codeExecutionResult',
                     codeExecutionResult: p.codeExecutionResult
                 }
+            } else if ('inlineData' in p && p.inlineData) {
+                // Extract inline image data for processing by Agent
+                inlineDataItems.push({
+                    type: 'gemini_inline_data',
+                    mimeType: p.inlineData.mimeType,
+                    data: p.inlineData.data
+                })
+                // Return the inline data as part of the content structure
+                return {
+                    type: 'inlineData',
+                    inlineData: p.inlineData
+                }
             }
             return p
         })
@@ -488,6 +501,12 @@ export function mapGenerateContentResultToChatResult(
         text = block?.text ?? text
     }

+    // Build response_metadata with inline data if present
+    const response_metadata: any = {}
+    if (inlineDataItems.length > 0) {
+        response_metadata.inlineData = inlineDataItems
+    }
+
     const generation: ChatGeneration = {
         text,
         message: new AIMessage({
@@ -502,7 +521,8 @@ export function mapGenerateContentResultToChatResult(
             additional_kwargs: {
                 ...generationInfo
             },
-            usage_metadata: extra?.usageMetadata
+            usage_metadata: extra?.usageMetadata,
+            response_metadata: Object.keys(response_metadata).length > 0 ? response_metadata : undefined
         }),
         generationInfo
     }
@@ -533,6 +553,8 @@ export function convertResponseContentToChatGenerationChunk(
     const [candidate] = response.candidates
     const { content: candidateContent, ...generationInfo } = candidate
     let content: MessageContent | undefined
+    const inlineDataItems: any[] = []

     // Checks if some parts do not have text. If false, it means that the content is a string.
     if (Array.isArray(candidateContent?.parts) && candidateContent.parts.every((p) => 'text' in p)) {
         content = candidateContent.parts.map((p) => p.text).join('')
@@ -553,6 +575,18 @@ export function convertResponseContentToChatGenerationChunk(
                     type: 'codeExecutionResult',
                     codeExecutionResult: p.codeExecutionResult
                 }
+            } else if ('inlineData' in p && p.inlineData) {
+                // Extract inline image data for processing by Agent
+                inlineDataItems.push({
+                    type: 'gemini_inline_data',
+                    mimeType: p.inlineData.mimeType,
+                    data: p.inlineData.data
+                })
+                // Return the inline data as part of the content structure
+                return {
+                    type: 'inlineData',
+                    inlineData: p.inlineData
+                }
             }
             return p
         })
@@ -582,6 +616,12 @@ export function convertResponseContentToChatGenerationChunk(
         )
     }

+    // Build response_metadata with inline data if present
+    const response_metadata: any = {}
+    if (inlineDataItems.length > 0) {
+        response_metadata.inlineData = inlineDataItems
+    }
+
     return new ChatGenerationChunk({
         text,
         message: new AIMessageChunk({
@@ -591,7 +631,8 @@ export function convertResponseContentToChatGenerationChunk(
             // Each chunk can have unique "generationInfo", and merging strategy is unclear,
             // so leave blank for now.
             additional_kwargs: {},
-            usage_metadata: extra.usageMetadata
+            usage_metadata: extra.usageMetadata,
+            response_metadata: Object.keys(response_metadata).length > 0 ? response_metadata : undefined
         }),
         generationInfo
    })

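Taken together with the Agentflow utils changes earlier in this diff, these hunks complete the image round trip: the wrapper surfaces Gemini inlineData parts both in message content and in response_metadata.inlineData, extractArtifactsFromResponse persists the base64 payloads to storage, and replaceInlineDataWithFileReferences swaps the inline items for stored-file references. The shape of an emitted chunk after these changes (values illustrative):

    import { AIMessageChunk } from '@langchain/core/messages'

    const chunk = new AIMessageChunk({
        content: [{ type: 'inlineData', inlineData: { mimeType: 'image/png', data: '<base64>' } }],
        additional_kwargs: {},
        response_metadata: { inlineData: [{ type: 'gemini_inline_data', mimeType: 'image/png', data: '<base64>' }] }
    })
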
@@ -41,15 +41,17 @@ class ChatHuggingFace_ChatModels implements INode {
                 label: 'Model',
                 name: 'model',
                 type: 'string',
-                description: 'If using own inference endpoint, leave this blank',
-                placeholder: 'gpt2'
+                description:
+                    'Model name (e.g., deepseek-ai/DeepSeek-V3.2-Exp:novita). If model includes provider (:) or using router endpoint, leave Endpoint blank.',
+                placeholder: 'deepseek-ai/DeepSeek-V3.2-Exp:novita'
             },
             {
                 label: 'Endpoint',
                 name: 'endpoint',
                 type: 'string',
                 placeholder: 'https://xyz.eu-west-1.aws.endpoints.huggingface.cloud/gpt2',
-                description: 'Using your own inference endpoint',
+                description:
+                    'Custom inference endpoint (optional). Not needed for models with providers (:) or router endpoints. Leave blank to use Inference Providers.',
                 optional: true
             },
             {
@@ -103,7 +105,7 @@ class ChatHuggingFace_ChatModels implements INode {
                 type: 'string',
                 rows: 4,
                 placeholder: 'AI assistant:',
-                description: 'Sets the stop sequences to use. Use comma to seperate different sequences.',
+                description: 'Sets the stop sequences to use. Use comma to separate different sequences.',
                 optional: true,
                 additionalParams: true
             }
@@ -124,6 +126,15 @@ class ChatHuggingFace_ChatModels implements INode {
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const huggingFaceApiKey = getCredentialParam('huggingFaceApiKey', credentialData, nodeData)

+        if (!huggingFaceApiKey) {
+            console.error('[ChatHuggingFace] API key validation failed: No API key found')
+            throw new Error('HuggingFace API key is required. Please configure it in the credential settings.')
+        }
+
+        if (!huggingFaceApiKey.startsWith('hf_')) {
+            console.warn('[ChatHuggingFace] API key format warning: Key does not start with "hf_"')
+        }
+
         const obj: Partial<HFInput> = {
             model,
             apiKey: huggingFaceApiKey

@@ -56,9 +56,9 @@ export class HuggingFaceInference extends LLM implements HFInput {
         this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY')
         this.endpointUrl = fields?.endpointUrl
         this.includeCredentials = fields?.includeCredentials
-        if (!this.apiKey) {
+        if (!this.apiKey || this.apiKey.trim() === '') {
             throw new Error(
-                'Please set an API key for HuggingFace Hub in the environment variable HUGGINGFACEHUB_API_KEY or in the apiKey field of the HuggingFaceInference constructor.'
+                'Please set an API key for HuggingFace Hub. Either configure it in the credential settings in the UI, or set the environment variable HUGGINGFACEHUB_API_KEY.'
             )
         }
     }
@@ -68,19 +68,21 @@ export class HuggingFaceInference extends LLM implements HFInput {
     }

     invocationParams(options?: this['ParsedCallOptions']) {
-        return {
-            model: this.model,
-            parameters: {
-                // make it behave similar to openai, returning only the generated text
-                return_full_text: false,
-                temperature: this.temperature,
-                max_new_tokens: this.maxTokens,
-                stop: options?.stop ?? this.stopSequences,
-                top_p: this.topP,
-                top_k: this.topK,
-                repetition_penalty: this.frequencyPenalty
-            }
-        }
+        // Return parameters compatible with chatCompletion API (OpenAI-compatible format)
+        const params: any = {
+            temperature: this.temperature,
+            max_tokens: this.maxTokens,
+            stop: options?.stop ?? this.stopSequences,
+            top_p: this.topP
+        }
+        // Include optional parameters if they are defined
+        if (this.topK !== undefined) {
+            params.top_k = this.topK
+        }
+        if (this.frequencyPenalty !== undefined) {
+            params.frequency_penalty = this.frequencyPenalty
+        }
+        return params
     }

     async *_streamResponseChunks(
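The rewritten invocationParams drops the textGeneration-style parameters envelope (max_new_tokens, repetition_penalty, return_full_text) in favor of flat OpenAI-compatible chat-completion fields. Assuming, for example, temperature 0.7, maxTokens 256, topP 0.9 and neither topK nor frequencyPenalty set, it would now return:

    // { temperature: 0.7, max_tokens: 256, stop: undefined, top_p: 0.9 }
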
@@ -88,51 +90,109 @@ export class HuggingFaceInference extends LLM implements HFInput {
         options: this['ParsedCallOptions'],
         runManager?: CallbackManagerForLLMRun
     ): AsyncGenerator<GenerationChunk> {
-        const hfi = await this._prepareHFInference()
-        const stream = await this.caller.call(async () =>
-            hfi.textGenerationStream({
-                ...this.invocationParams(options),
-                inputs: prompt
-            })
-        )
-        for await (const chunk of stream) {
-            const token = chunk.token.text
-            yield new GenerationChunk({ text: token, generationInfo: chunk })
-            await runManager?.handleLLMNewToken(token ?? '')
-
-            // stream is done
-            if (chunk.generated_text)
-                yield new GenerationChunk({
-                    text: '',
-                    generationInfo: { finished: true }
-                })
+        try {
+            const client = await this._prepareHFInference()
+            const stream = await this.caller.call(async () =>
+                client.chatCompletionStream({
+                    model: this.model,
+                    messages: [{ role: 'user', content: prompt }],
+                    ...this.invocationParams(options)
+                })
+            )
+            for await (const chunk of stream) {
+                const token = chunk.choices[0]?.delta?.content || ''
+                if (token) {
+                    yield new GenerationChunk({ text: token, generationInfo: chunk })
+                    await runManager?.handleLLMNewToken(token)
+                }
+                // stream is done when finish_reason is set
+                if (chunk.choices[0]?.finish_reason) {
+                    yield new GenerationChunk({
+                        text: '',
+                        generationInfo: { finished: true }
+                    })
+                    break
+                }
+            }
+        } catch (error: any) {
+            console.error('[ChatHuggingFace] Error in _streamResponseChunks:', error)
+            // Provide more helpful error messages
+            if (error?.message?.includes('endpointUrl') || error?.message?.includes('third-party provider')) {
+                throw new Error(
+                    `Cannot use custom endpoint with model "${this.model}" that includes a provider. Please leave the Endpoint field blank in the UI. Original error: ${error.message}`
+                )
+            }
+            throw error
+        }
     }

     /** @ignore */
     async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
-        const hfi = await this._prepareHFInference()
-        const args = { ...this.invocationParams(options), inputs: prompt }
-        const res = await this.caller.callWithOptions({ signal: options.signal }, hfi.textGeneration.bind(hfi), args)
-        return res.generated_text
+        try {
+            const client = await this._prepareHFInference()
+            // Use chatCompletion for chat models (v4 supports conversational models via Inference Providers)
+            const args = {
+                model: this.model,
+                messages: [{ role: 'user', content: prompt }],
+                ...this.invocationParams(options)
+            }
+            const res = await this.caller.callWithOptions({ signal: options.signal }, client.chatCompletion.bind(client), args)
+            const content = res.choices[0]?.message?.content || ''
+            if (!content) {
+                console.error('[ChatHuggingFace] No content in response:', JSON.stringify(res))
+                throw new Error(`No content received from HuggingFace API. Response: ${JSON.stringify(res)}`)
+            }
+            return content
+        } catch (error: any) {
+            console.error('[ChatHuggingFace] Error in _call:', error.message)
+            // Provide more helpful error messages
+            if (error?.message?.includes('endpointUrl') || error?.message?.includes('third-party provider')) {
+                throw new Error(
+                    `Cannot use custom endpoint with model "${this.model}" that includes a provider. Please leave the Endpoint field blank in the UI. Original error: ${error.message}`
+                )
+            }
+            if (error?.message?.includes('Invalid username or password') || error?.message?.includes('authentication')) {
+                throw new Error(
+                    `HuggingFace API authentication failed. Please verify your API key is correct and starts with "hf_". Original error: ${error.message}`
+                )
+            }
+            throw error
+        }
     }

     /** @ignore */
     private async _prepareHFInference() {
-        const { HfInference } = await HuggingFaceInference.imports()
-        const hfi = new HfInference(this.apiKey, {
-            includeCredentials: this.includeCredentials
-        })
-        return this.endpointUrl ? hfi.endpoint(this.endpointUrl) : hfi
+        if (!this.apiKey || this.apiKey.trim() === '') {
+            console.error('[ChatHuggingFace] API key validation failed: Empty or undefined')
+            throw new Error('HuggingFace API key is required. Please configure it in the credential settings.')
+        }
+
+        const { InferenceClient } = await HuggingFaceInference.imports()
+        // Use InferenceClient for chat models (works better with Inference Providers)
+        const client = new InferenceClient(this.apiKey)
+
+        // Don't override endpoint if model uses a provider (contains ':') or if endpoint is router-based
+        // When using Inference Providers, endpoint should be left blank - InferenceClient handles routing automatically
+        if (
+            this.endpointUrl &&
+            !this.model.includes(':') &&
+            !this.endpointUrl.includes('/v1/chat/completions') &&
+            !this.endpointUrl.includes('router.huggingface.co')
+        ) {
+            return client.endpoint(this.endpointUrl)
+        }
+
+        // Return client without endpoint override - InferenceClient will use Inference Providers automatically
|
||||
return client
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
static async imports(): Promise<{
|
||||
HfInference: typeof import('@huggingface/inference').HfInference
|
||||
InferenceClient: typeof import('@huggingface/inference').InferenceClient
|
||||
}> {
|
||||
try {
|
||||
const { HfInference } = await import('@huggingface/inference')
|
||||
return { HfInference }
|
||||
const { InferenceClient } = await import('@huggingface/inference')
|
||||
return { InferenceClient }
|
||||
} catch (e) {
|
||||
throw new Error('Please install huggingface as a dependency with, e.g. `pnpm install @huggingface/inference`')
|
||||
}
|
||||
|
|
|
|||
|
|
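For context, a minimal sketch of the chatCompletion call path the reworked node now relies on. The model id, env-var handling, and top-level await are illustrative assumptions, not taken from the diff:

```typescript
import { InferenceClient } from '@huggingface/inference'

// Hypothetical standalone usage mirroring _call above (model id is illustrative)
const client = new InferenceClient(process.env.HUGGINGFACEHUB_API_KEY)
const res = await client.chatCompletion({
    model: 'meta-llama/Llama-3.1-8B-Instruct',
    messages: [{ role: 'user', content: 'Say hello' }],
    max_tokens: 64
})
console.log(res.choices[0]?.message?.content)
```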
@@ -1,7 +1,8 @@
-import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
 import { BaseCache } from '@langchain/core/caches'
-import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+import { ChatOpenRouter } from './FlowiseChatOpenRouter'

 class ChatOpenRouter_ChatModels implements INode {
     label: string

@@ -23,7 +24,7 @@ class ChatOpenRouter_ChatModels implements INode {
         this.icon = 'openRouter.svg'
         this.category = 'Chat Models'
         this.description = 'Wrapper around Open Router Inference API'
-        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
+        this.baseClasses = [this.type, ...getBaseClasses(LangchainChatOpenAI)]
         this.credential = {
             label: 'Connect Credential',
             name: 'credential',

@@ -114,6 +115,40 @@ class ChatOpenRouter_ChatModels implements INode {
                 type: 'json',
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'Allow Image Uploads',
+                name: 'allowImageUploads',
+                type: 'boolean',
+                description:
+                    'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
+                default: false,
+                optional: true
+            },
+            {
+                label: 'Image Resolution',
+                description: 'This parameter controls the resolution in which the model views the image.',
+                name: 'imageResolution',
+                type: 'options',
+                options: [
+                    {
+                        label: 'Low',
+                        name: 'low'
+                    },
+                    {
+                        label: 'High',
+                        name: 'high'
+                    },
+                    {
+                        label: 'Auto',
+                        name: 'auto'
+                    }
+                ],
+                default: 'low',
+                optional: false,
+                show: {
+                    allowImageUploads: true
+                }
+            }
         ]
     }

@@ -130,6 +165,8 @@ class ChatOpenRouter_ChatModels implements INode {
         const basePath = (nodeData.inputs?.basepath as string) || 'https://openrouter.ai/api/v1'
         const baseOptions = nodeData.inputs?.baseOptions
         const cache = nodeData.inputs?.cache as BaseCache
+        const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
+        const imageResolution = nodeData.inputs?.imageResolution as string

         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const openRouterApiKey = getCredentialParam('openRouterApiKey', credentialData, nodeData)

@@ -155,7 +192,7 @@ class ChatOpenRouter_ChatModels implements INode {
             try {
                 parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
             } catch (exception) {
-                throw new Error("Invalid JSON in the ChatCerebras's BaseOptions: " + exception)
+                throw new Error("Invalid JSON in the ChatOpenRouter's BaseOptions: " + exception)
             }
         }

@@ -166,7 +203,15 @@ class ChatOpenRouter_ChatModels implements INode {
             }
         }

-        const model = new ChatOpenAI(obj)
+        const multiModalOption: IMultiModalOption = {
+            image: {
+                allowImageUploads: allowImageUploads ?? false,
+                imageResolution
+            }
+        }
+
+        const model = new ChatOpenRouter(nodeData.id, obj)
+        model.setMultiModalOption(multiModalOption)
         return model
     }
 }
@@ -0,0 +1,29 @@
+import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { IMultiModalOption, IVisionChatModal } from '../../../src'
+
+export class ChatOpenRouter extends LangchainChatOpenAI implements IVisionChatModal {
+    configuredModel: string
+    configuredMaxToken?: number
+    multiModalOption: IMultiModalOption
+    id: string
+
+    constructor(id: string, fields?: ChatOpenAIFields) {
+        super(fields)
+        this.id = id
+        this.configuredModel = fields?.modelName ?? ''
+        this.configuredMaxToken = fields?.maxTokens
+    }
+
+    revertToOriginalModel(): void {
+        this.model = this.configuredModel
+        this.maxTokens = this.configuredMaxToken
+    }
+
+    setMultiModalOption(multiModalOption: IMultiModalOption): void {
+        this.multiModalOption = multiModalOption
+    }
+
+    setVisionModel(): void {
+        // pass - OpenRouter models don't need model switching
+    }
+}
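A sketch of how the node is expected to drive this new wrapper (ids and option values are hypothetical): init() constructs it, attaches the multi-modal option, and the vision utilities later read it through the IVisionChatModal interface.

```typescript
// Hypothetical wiring, mirroring the node's init() above
const model = new ChatOpenRouter('node-1', { modelName: 'openai/gpt-4o', temperature: 0 })
model.setMultiModalOption({ image: { allowImageUploads: true, imageResolution: 'low' } })
// setVisionModel() is deliberately a no-op: OpenRouter routes vision-capable models itself
```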
@@ -47,7 +47,7 @@ class Json_DocumentLoaders implements INode {
     constructor() {
         this.label = 'Json File'
         this.name = 'jsonFile'
-        this.version = 3.0
+        this.version = 3.1
         this.type = 'Document'
         this.icon = 'json.svg'
         this.category = 'Document Loaders'

@@ -66,6 +66,14 @@ class Json_DocumentLoaders implements INode {
                 type: 'TextSplitter',
                 optional: true
             },
+            {
+                label: 'Separate by JSON Object (JSON Array)',
+                name: 'separateByObject',
+                type: 'boolean',
+                description: 'If enabled and the file is a JSON Array, each JSON object will be extracted as a chunk',
+                optional: true,
+                additionalParams: true
+            },
             {
                 label: 'Pointers Extraction (separated by commas)',
                 name: 'pointersName',

@@ -73,7 +81,10 @@ class Json_DocumentLoaders implements INode {
                 description:
                     'Ex: { "key": "value" }, Pointer Extraction = "key", "value" will be extracted as pageContent of the chunk. Use comma to separate multiple pointers',
                 placeholder: 'key1, key2',
-                optional: true
+                optional: true,
+                hide: {
+                    separateByObject: true
+                }
             },
             {
                 label: 'Additional Metadata',

@@ -122,6 +133,7 @@ class Json_DocumentLoaders implements INode {
         const pointersName = nodeData.inputs?.pointersName as string
         const metadata = nodeData.inputs?.metadata
         const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string
+        const separateByObject = nodeData.inputs?.separateByObject as boolean
         const output = nodeData.outputs?.output as string

         let omitMetadataKeys: string[] = []

@@ -153,7 +165,7 @@ class Json_DocumentLoaders implements INode {
                 if (!file) continue
                 const fileData = await getFileFromStorage(file, orgId, chatflowid)
                 const blob = new Blob([fileData])
-                const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata)
+                const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata, separateByObject)

                 if (textSplitter) {
                     let splittedDocs = await loader.load()

@@ -176,7 +188,7 @@ class Json_DocumentLoaders implements INode {
                 splitDataURI.pop()
                 const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
                 const blob = new Blob([bf])
-                const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata)
+                const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata, separateByObject)

                 if (textSplitter) {
                     let splittedDocs = await loader.load()

@@ -306,13 +318,20 @@ class TextLoader extends BaseDocumentLoader {
 class JSONLoader extends TextLoader {
     public pointers: string[]
     private metadataMapping: Record<string, string>
+    private separateByObject: boolean

-    constructor(filePathOrBlob: string | Blob, pointers: string | string[] = [], metadataMapping: Record<string, string> = {}) {
+    constructor(
+        filePathOrBlob: string | Blob,
+        pointers: string | string[] = [],
+        metadataMapping: Record<string, string> = {},
+        separateByObject: boolean = false
+    ) {
         super(filePathOrBlob)
         this.pointers = Array.isArray(pointers) ? pointers : [pointers]
         if (metadataMapping) {
             this.metadataMapping = typeof metadataMapping === 'object' ? metadataMapping : JSON.parse(metadataMapping)
         }
+        this.separateByObject = separateByObject
     }

     protected async parse(raw: string): Promise<Document[]> {

@@ -323,14 +342,24 @@ class JSONLoader extends TextLoader {
         const jsonArray = Array.isArray(json) ? json : [json]

         for (const item of jsonArray) {
-            const content = this.extractContent(item)
-            const metadata = this.extractMetadata(item)
-
-            for (const pageContent of content) {
-                documents.push({
-                    pageContent,
-                    metadata
-                })
-            }
+            if (this.separateByObject) {
+                if (typeof item === 'object' && item !== null && !Array.isArray(item)) {
+                    const metadata = this.extractMetadata(item)
+                    const pageContent = this.formatObjectAsKeyValue(item)
+                    documents.push({
+                        pageContent,
+                        metadata
+                    })
+                }
+            } else {
+                const content = this.extractContent(item)
+                const metadata = this.extractMetadata(item)
+                for (const pageContent of content) {
+                    documents.push({
+                        pageContent,
+                        metadata
+                    })
+                }
+            }
         }

@@ -370,6 +399,30 @@ class JSONLoader extends TextLoader {
         return metadata
     }

+    /**
+     * Formats a JSON object as readable key-value pairs
+     */
+    private formatObjectAsKeyValue(obj: any, prefix: string = ''): string {
+        const lines: string[] = []
+
+        for (const [key, value] of Object.entries(obj)) {
+            const fullKey = prefix ? `${prefix}.${key}` : key
+
+            if (value === null || value === undefined) {
+                lines.push(`${fullKey}: ${value}`)
+            } else if (Array.isArray(value)) {
+                lines.push(`${fullKey}: ${JSON.stringify(value)}`)
+            } else if (typeof value === 'object') {
+                // Recursively format nested objects
+                lines.push(this.formatObjectAsKeyValue(value, fullKey))
+            } else {
+                lines.push(`${fullKey}: ${value}`)
+            }
+        }
+
+        return lines.join('\n')
+    }
+
     /**
      * If JSON pointers are specified, return all strings below any of them
      * and exclude all other nodes expect if they match a JSON pointer.
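To illustrate the new separateByObject mode, here is the flattening formatObjectAsKeyValue performs for one array element (the data is invented; array recursion is elided for brevity):

```typescript
// Input file: [{ "name": "Ada", "role": { "title": "Engineer" } }, { "name": "Grace" }]
const item = { name: 'Ada', role: { title: 'Engineer' } }
const lines = Object.entries(item).map(([k, v]) =>
    typeof v === 'object' && v !== null
        ? Object.entries(v)
              .map(([k2, v2]) => `${k}.${k2}: ${v2}`)
              .join('\n')
        : `${k}: ${v}`
)
console.log(lines.join('\n')) // chunk 1: "name: Ada\nrole.title: Engineer"; second object yields "name: Grace"
```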
@@ -190,11 +190,14 @@ class Playwright_DocumentLoaders implements INode {
         async function playwrightLoader(url: string): Promise<Document[] | undefined> {
             try {
                 let docs = []
+
+                const executablePath = process.env.PLAYWRIGHT_EXECUTABLE_PATH
+
                 const config: PlaywrightWebBaseLoaderOptions = {
                     launchOptions: {
                         args: ['--no-sandbox'],
                         headless: true,
-                        executablePath: process.env.PLAYWRIGHT_EXECUTABLE_FILE_PATH
+                        executablePath: executablePath
                     }
                 }
                 if (waitUntilGoToOption) {
@@ -181,11 +181,14 @@ class Puppeteer_DocumentLoaders implements INode {
         async function puppeteerLoader(url: string): Promise<Document[] | undefined> {
             try {
                 let docs: Document[] = []
+
+                const executablePath = process.env.PUPPETEER_EXECUTABLE_PATH
+
                 const config: PuppeteerWebBaseLoaderOptions = {
                     launchOptions: {
                         args: ['--no-sandbox'],
                         headless: 'new',
-                        executablePath: process.env.PUPPETEER_EXECUTABLE_FILE_PATH
+                        executablePath: executablePath
                     }
                 }
                 if (waitUntilGoToOption) {
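Both loaders now read the browser binary from PLAYWRIGHT_EXECUTABLE_PATH / PUPPETEER_EXECUTABLE_PATH (the _EXECUTABLE_FILE_PATH variants above are the old names). A quick sketch of pointing them at a system Chromium; the path is illustrative:

```typescript
// Set before the loaders are constructed (example path)
process.env.PLAYWRIGHT_EXECUTABLE_PATH = '/usr/bin/chromium-browser'
process.env.PUPPETEER_EXECUTABLE_PATH = '/usr/bin/chromium-browser'
```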
@@ -27,8 +27,6 @@ type Element = {
 }

 export class UnstructuredLoader extends BaseDocumentLoader {
-    public filePath: string
-
     private apiUrl = process.env.UNSTRUCTURED_API_URL || 'https://api.unstructuredapp.io/general/v0/general'

     private apiKey: string | undefined = process.env.UNSTRUCTURED_API_KEY

@@ -138,7 +136,7 @@ export class UnstructuredLoader extends BaseDocumentLoader {
         })

         if (!response.ok) {
-            throw new Error(`Failed to partition file ${this.filePath} with error ${response.status} and message ${await response.text()}`)
+            throw new Error(`Failed to partition file with error ${response.status} and message ${await response.text()}`)
         }

         const elements = await response.json()
@@ -4,15 +4,11 @@ import {
     UnstructuredLoaderOptions,
     UnstructuredLoaderStrategy,
     SkipInferTableTypes,
-    HiResModelName,
-    UnstructuredLoader as LCUnstructuredLoader
+    HiResModelName
 } from '@langchain/community/document_loaders/fs/unstructured'
 import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src/utils'
 import { getFileFromStorage, INodeOutputsValue } from '../../../src'
 import { UnstructuredLoader } from './Unstructured'
-import { isPathTraversal } from '../../../src/validator'
-import sanitize from 'sanitize-filename'
-import path from 'path'

 class UnstructuredFile_DocumentLoaders implements INode {
     label: string

@@ -44,17 +40,6 @@ class UnstructuredFile_DocumentLoaders implements INode {
             optional: true
         }
         this.inputs = [
-            /** Deprecated
-            {
-                label: 'File Path',
-                name: 'filePath',
-                type: 'string',
-                placeholder: '',
-                optional: true,
-                warning:
-                    'Use the File Upload instead of File path. If file is uploaded, this path is ignored. Path will be deprecated in future releases.'
-            },
-            */
             {
                 label: 'Files Upload',
                 name: 'fileObject',

@@ -455,7 +440,6 @@ class UnstructuredFile_DocumentLoaders implements INode {
     }

     async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
-        const filePath = nodeData.inputs?.filePath as string
         const unstructuredAPIUrl = nodeData.inputs?.unstructuredAPIUrl as string
         const strategy = nodeData.inputs?.strategy as UnstructuredLoaderStrategy
         const encoding = nodeData.inputs?.encoding as string

@@ -560,37 +544,8 @@ class UnstructuredFile_DocumentLoaders implements INode {
                     docs.push(...loaderDocs)
                 }
             }
-        } else if (filePath) {
-            if (!filePath || typeof filePath !== 'string') {
-                throw new Error('Invalid file path format')
-            }
-
-            if (isPathTraversal(filePath)) {
-                throw new Error('Invalid path characters detected in filePath - path traversal not allowed')
-            }
-
-            const parsedPath = path.parse(filePath)
-            const sanitizedFilename = sanitize(parsedPath.base)
-
-            if (!sanitizedFilename || sanitizedFilename.trim() === '') {
-                throw new Error('Invalid filename after sanitization')
-            }
-
-            const sanitizedFilePath = path.join(parsedPath.dir, sanitizedFilename)
-
-            if (!path.isAbsolute(sanitizedFilePath)) {
-                throw new Error('File path must be absolute')
-            }
-
-            if (sanitizedFilePath.includes('..')) {
-                throw new Error('Invalid file path - directory traversal not allowed')
-            }
-
-            const loader = new LCUnstructuredLoader(sanitizedFilePath, obj)
-            const loaderDocs = await loader.load()
-            docs.push(...loaderDocs)
         } else {
-            throw new Error('File path or File upload is required')
+            throw new Error('File upload is required')
         }

         if (metadata) {
@@ -1,3 +1,6 @@
+/*
+ * Uncomment this if you want to use the UnstructuredFolder to load a folder from the file system
+
 import { omit } from 'lodash'
 import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
 import {

@@ -516,3 +519,4 @@ class UnstructuredFolder_DocumentLoaders implements INode {
 }

 module.exports = { nodeClass: UnstructuredFolder_DocumentLoaders }
+*/
@@ -96,7 +96,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
             {
                 label: 'Max AWS API retries',
                 name: 'maxRetries',
-                description: 'This will limit the nubmer of AWS API for Titan model embeddings call retries. Used to avoid throttling.',
+                description: 'This will limit the number of AWS API for Titan model embeddings call retries. Used to avoid throttling.',
                 type: 'number',
                 optional: true,
                 default: 5,
@@ -23,24 +23,22 @@ export class HuggingFaceInferenceEmbeddings extends Embeddings implements Huggin
         this.model = fields?.model ?? 'sentence-transformers/distilbert-base-nli-mean-tokens'
         this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY')
         this.endpoint = fields?.endpoint ?? ''
-        this.client = new HfInference(this.apiKey)
-        if (this.endpoint) this.client.endpoint(this.endpoint)
+        const hf = new HfInference(this.apiKey)
+        // v4 uses Inference Providers by default; only override if custom endpoint provided
+        this.client = this.endpoint ? hf.endpoint(this.endpoint) : hf
     }

     async _embed(texts: string[]): Promise<number[][]> {
         // replace newlines, which can negatively affect performance.
         const clean = texts.map((text) => text.replace(/\n/g, ' '))
-        const hf = new HfInference(this.apiKey)
         const obj: any = {
             inputs: clean
         }
-        if (this.endpoint) {
-            hf.endpoint(this.endpoint)
-        } else {
+        if (!this.endpoint) {
             obj.model = this.model
         }

-        const res = await this.caller.callWithOptions({}, hf.featureExtraction.bind(hf), obj)
+        const res = await this.caller.callWithOptions({}, this.client.featureExtraction.bind(this.client), obj)
         return res as number[][]
     }
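For reference, a minimal sketch of the featureExtraction call the embeddings class delegates to; the model id and top-level await are illustrative assumptions:

```typescript
import { HfInference } from '@huggingface/inference'

const hf = new HfInference(process.env.HUGGINGFACEHUB_API_KEY)
// Returns one embedding vector per input string
const vectors = (await hf.featureExtraction({
    model: 'sentence-transformers/all-MiniLM-L6-v2',
    inputs: ['hello world', 'second sentence']
})) as number[][]
```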
@@ -39,7 +39,7 @@ class SubQuestionQueryEngine_LlamaIndex implements INode {
         this.icon = 'subQueryEngine.svg'
         this.category = 'Engine'
         this.description =
-            'Breaks complex query into sub questions for each relevant data source, then gather all the intermediate reponses and synthesizes a final response'
+            'Breaks complex query into sub questions for each relevant data source, then gather all the intermediate responses and synthesizes a final response'
         this.baseClasses = [this.type, 'BaseQueryEngine']
         this.tags = ['LlamaIndex']
         this.inputs = [
@@ -78,6 +78,8 @@ export class HuggingFaceInference extends LLM implements HFInput {
     async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
         const { HfInference } = await HuggingFaceInference.imports()
         const hf = new HfInference(this.apiKey)
+        // v4 uses Inference Providers by default; only override if custom endpoint provided
+        const hfClient = this.endpoint ? hf.endpoint(this.endpoint) : hf
         const obj: any = {
             parameters: {
                 // make it behave similar to openai, returning only the generated text

@@ -90,12 +92,10 @@ export class HuggingFaceInference extends LLM implements HFInput {
             },
             inputs: prompt
         }
-        if (this.endpoint) {
-            hf.endpoint(this.endpoint)
-        } else {
+        if (!this.endpoint) {
             obj.model = this.model
         }
-        const res = await this.caller.callWithOptions({ signal: options.signal }, hf.textGeneration.bind(hf), obj)
+        const res = await this.caller.callWithOptions({ signal: options.signal }, hfClient.textGeneration.bind(hfClient), obj)
         return res.generated_text
     }
@@ -21,6 +21,7 @@ import { ChatOpenAI } from '../../chatmodels/ChatOpenAI/FlowiseChatOpenAI'
 import { ChatAnthropic } from '../../chatmodels/ChatAnthropic/FlowiseChatAnthropic'
 import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils'
 import { ChatGoogleGenerativeAI } from '../../chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI'
+import { AzureChatOpenAI } from '../../chatmodels/AzureChatOpenAI/FlowiseAzureChatOpenAI'

 const sysPrompt = `You are a supervisor tasked with managing a conversation between the following workers: {team_members}.
 Given the following user request, respond with the worker to act next.

@@ -242,7 +243,7 @@ class Supervisor_MultiAgents implements INode {
                     }
                 }
             })
-        } else if (llm instanceof ChatOpenAI) {
+        } else if (llm instanceof ChatOpenAI || llm instanceof AzureChatOpenAI) {
             let prompt = ChatPromptTemplate.fromMessages([
                 ['system', systemPrompt],
                 new MessagesPlaceholder('messages'),
@@ -11,7 +11,7 @@ return [
     tool_calls: [
         {
             id: "12345",
-            name: "calulator",
+            name: "calculator",
             args: {
                 number1: 333382,
                 number2: 1932,
@@ -62,7 +62,6 @@ class MySQLRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
-                description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },

@@ -219,7 +218,16 @@ class MySQLRecordManager implements RecordManagerInterface {
                 unique key \`unique_key_namespace\` (\`key\`,
                 \`namespace\`));`)

-            const columns = [`updated_at`, `key`, `namespace`, `group_id`]
+            // Add doc_id column if it doesn't exist (migration for existing tables)
+            const checkColumn = await queryRunner.manager.query(
+                `SELECT COUNT(1) ColumnExists FROM INFORMATION_SCHEMA.COLUMNS
+                WHERE table_schema=DATABASE() AND table_name='${tableName}' AND column_name='doc_id';`
+            )
+            if (checkColumn[0].ColumnExists === 0) {
+                await queryRunner.manager.query(`ALTER TABLE \`${tableName}\` ADD COLUMN \`doc_id\` longtext;`)
+            }
+
+            const columns = [`updated_at`, `key`, `namespace`, `group_id`, `doc_id`]
             for (const column of columns) {
                 // MySQL does not support 'IF NOT EXISTS' function for Index
                 const Check = await queryRunner.manager.query(

@@ -261,7 +269,7 @@ class MySQLRecordManager implements RecordManagerInterface {
         }
     }

-    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }

@@ -277,23 +285,23 @@ class MySQLRecordManager implements RecordManagerInterface {
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }

-        const groupIds = _groupIds ?? keys.map(() => null)
+        // Handle both new format (objects with uid and docId) and old format (strings)
+        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
+        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
+        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)

-        if (groupIds.length !== keys.length) {
-            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`)
+        const groupIds = _groupIds ?? keyStrings.map(() => null)
+
+        if (groupIds.length !== keyStrings.length) {
+            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids (${groupIds.length})`)
         }

-        const recordsToUpsert = keys.map((key, i) => [
-            key,
-            this.namespace,
-            updatedAt,
-            groupIds[i] ?? null // Ensure groupIds[i] is null if undefined
-        ])
+        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i] ?? null, docIds[i] ?? null])

         const query = `
-            INSERT INTO \`${tableName}\` (\`key\`, \`namespace\`, \`updated_at\`, \`group_id\`)
-            VALUES (?, ?, ?, ?)
-            ON DUPLICATE KEY UPDATE \`updated_at\` = VALUES(\`updated_at\`)`
+            INSERT INTO \`${tableName}\` (\`key\`, \`namespace\`, \`updated_at\`, \`group_id\`, \`doc_id\`)
+            VALUES (?, ?, ?, ?, ?)
+            ON DUPLICATE KEY UPDATE \`updated_at\` = VALUES(\`updated_at\`), \`doc_id\` = VALUES(\`doc_id\`)`

         // To handle multiple files upsert
         try {

@@ -349,13 +357,13 @@ class MySQLRecordManager implements RecordManagerInterface {
         }
     }

-    async listKeys(options?: ListKeyOptions): Promise<string[]> {
+    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
         const dataSource = await this.getDataSource()
         const queryRunner = dataSource.createQueryRunner()
        const tableName = this.sanitizeTableName(this.tableName)

         try {
-            const { before, after, limit, groupIds } = options ?? {}
+            const { before, after, limit, groupIds, docId } = options ?? {}
             let query = `SELECT \`key\` FROM \`${tableName}\` WHERE \`namespace\` = ?`
             const values: (string | number | string[])[] = [this.namespace]

@@ -382,6 +390,11 @@ class MySQLRecordManager implements RecordManagerInterface {
                 values.push(...groupIds.filter((gid): gid is string => gid !== null))
             }

+            if (docId) {
+                query += ` AND \`doc_id\` = ?`
+                values.push(docId)
+            }
+
             query += ';'

             // Directly using try/catch with async/await for cleaner flow
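The same dual-format update()/listKeys() change is applied to the Postgres and SQLite managers below. A sketch of how a caller might exercise it; the ids are hypothetical, and the legacy string[] form remains accepted:

```typescript
// Legacy form still works:
await recordManager.update(['hash-1', 'hash-2'])

// New form carries the originating document id per key:
await recordManager.update([
    { uid: 'hash-1', docId: 'doc-123' },
    { uid: 'hash-2', docId: 'doc-123' }
])

// listKeys can now be scoped to a single document:
const keys = await recordManager.listKeys({ docId: 'doc-123' })
```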
@@ -78,7 +78,6 @@ class PostgresRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
-                description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },

@@ -241,6 +240,19 @@ class PostgresRecordManager implements RecordManagerInterface {
                CREATE INDEX IF NOT EXISTS namespace_index ON "${tableName}" (namespace);
                CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)

+            // Add doc_id column if it doesn't exist (migration for existing tables)
+            await queryRunner.manager.query(`
+                DO $$
+                BEGIN
+                    IF NOT EXISTS (
+                        SELECT 1 FROM information_schema.columns
+                        WHERE table_name = '${tableName}' AND column_name = 'doc_id'
+                    ) THEN
+                        ALTER TABLE "${tableName}" ADD COLUMN doc_id TEXT;
+                        CREATE INDEX IF NOT EXISTS doc_id_index ON "${tableName}" (doc_id);
+                    END IF;
+                END $$;`)
+
             await queryRunner.release()
         } catch (e: any) {
             // This error indicates that the table already exists

@@ -286,7 +298,7 @@ class PostgresRecordManager implements RecordManagerInterface {
         return `(${placeholders.join(', ')})`
     }

-    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }

@@ -302,17 +314,22 @@ class PostgresRecordManager implements RecordManagerInterface {
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }

-        const groupIds = _groupIds ?? keys.map(() => null)
+        // Handle both new format (objects with uid and docId) and old format (strings)
+        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
+        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
+        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)

-        if (groupIds.length !== keys.length) {
-            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids ${groupIds.length})`)
+        const groupIds = _groupIds ?? keyStrings.map(() => null)
+
+        if (groupIds.length !== keyStrings.length) {
+            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids ${groupIds.length})`)
         }

-        const recordsToUpsert = keys.map((key, i) => [key, this.namespace, updatedAt, groupIds[i]])
+        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i], docIds[i]])

         const valuesPlaceholders = recordsToUpsert.map((_, j) => this.generatePlaceholderForRowAt(j, recordsToUpsert[0].length)).join(', ')

-        const query = `INSERT INTO "${tableName}" (key, namespace, updated_at, group_id) VALUES ${valuesPlaceholders} ON CONFLICT (key, namespace) DO UPDATE SET updated_at = EXCLUDED.updated_at;`
+        const query = `INSERT INTO "${tableName}" (key, namespace, updated_at, group_id, doc_id) VALUES ${valuesPlaceholders} ON CONFLICT (key, namespace) DO UPDATE SET updated_at = EXCLUDED.updated_at, doc_id = EXCLUDED.doc_id;`
         try {
             await queryRunner.manager.query(query, recordsToUpsert.flat())
             await queryRunner.release()

@@ -351,8 +368,8 @@ class PostgresRecordManager implements RecordManagerInterface {
         }
     }

-    async listKeys(options?: ListKeyOptions): Promise<string[]> {
-        const { before, after, limit, groupIds } = options ?? {}
+    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
+        const { before, after, limit, groupIds, docId } = options ?? {}
         const tableName = this.sanitizeTableName(this.tableName)

         let query = `SELECT key FROM "${tableName}" WHERE namespace = $1`

@@ -383,6 +400,12 @@ class PostgresRecordManager implements RecordManagerInterface {
             index += 1
         }

+        if (docId) {
+            values.push(docId)
+            query += ` AND doc_id = $${index}`
+            index += 1
+        }
+
         query += ';'

         const dataSource = await this.getDataSource()
@@ -51,7 +51,6 @@ class SQLiteRecordManager_RecordManager implements INode {
                 label: 'Namespace',
                 name: 'namespace',
                 type: 'string',
-                description: 'If not specified, chatflowid will be used',
                 additionalParams: true,
                 optional: true
             },

@@ -198,6 +197,15 @@ CREATE INDEX IF NOT EXISTS key_index ON "${tableName}" (key);
CREATE INDEX IF NOT EXISTS namespace_index ON "${tableName}" (namespace);
CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)

+            // Add doc_id column if it doesn't exist (migration for existing tables)
+            const checkColumn = await queryRunner.manager.query(
+                `SELECT COUNT(*) as count FROM pragma_table_info('${tableName}') WHERE name='doc_id';`
+            )
+            if (checkColumn[0].count === 0) {
+                await queryRunner.manager.query(`ALTER TABLE "${tableName}" ADD COLUMN doc_id TEXT;`)
+                await queryRunner.manager.query(`CREATE INDEX IF NOT EXISTS doc_id_index ON "${tableName}" (doc_id);`)
+            }
+
             await queryRunner.release()
         } catch (e: any) {
             // This error indicates that the table already exists

@@ -228,7 +236,7 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
         }
     }

-    async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
+    async update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: UpdateOptions): Promise<void> {
         if (keys.length === 0) {
             return
         }

@@ -243,23 +251,23 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
             throw new Error(`Time sync issue with database ${updatedAt} < ${timeAtLeast}`)
         }

-        const groupIds = _groupIds ?? keys.map(() => null)
+        // Handle both new format (objects with uid and docId) and old format (strings)
+        const isNewFormat = keys.length > 0 && typeof keys[0] === 'object' && 'uid' in keys[0]
+        const keyStrings = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.uid) : (keys as string[])
+        const docIds = isNewFormat ? (keys as Array<{ uid: string; docId: string }>).map((k) => k.docId) : keys.map(() => null)

-        if (groupIds.length !== keys.length) {
-            throw new Error(`Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`)
+        const groupIds = _groupIds ?? keyStrings.map(() => null)
+
+        if (groupIds.length !== keyStrings.length) {
+            throw new Error(`Number of keys (${keyStrings.length}) does not match number of group_ids (${groupIds.length})`)
         }

-        const recordsToUpsert = keys.map((key, i) => [
-            key,
-            this.namespace,
-            updatedAt,
-            groupIds[i] ?? null // Ensure groupIds[i] is null if undefined
-        ])
+        const recordsToUpsert = keyStrings.map((key, i) => [key, this.namespace, updatedAt, groupIds[i] ?? null, docIds[i] ?? null])

         const query = `
-            INSERT INTO "${tableName}" (key, namespace, updated_at, group_id)
-            VALUES (?, ?, ?, ?)
-            ON CONFLICT (key, namespace) DO UPDATE SET updated_at = excluded.updated_at`
+            INSERT INTO "${tableName}" (key, namespace, updated_at, group_id, doc_id)
+            VALUES (?, ?, ?, ?, ?)
+            ON CONFLICT (key, namespace) DO UPDATE SET updated_at = excluded.updated_at, doc_id = excluded.doc_id`

         try {
             // To handle multiple files upsert

@@ -314,8 +322,8 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
         }
     }

-    async listKeys(options?: ListKeyOptions): Promise<string[]> {
-        const { before, after, limit, groupIds } = options ?? {}
+    async listKeys(options?: ListKeyOptions & { docId?: string }): Promise<string[]> {
+        const { before, after, limit, groupIds, docId } = options ?? {}
         const tableName = this.sanitizeTableName(this.tableName)

         let query = `SELECT key FROM "${tableName}" WHERE namespace = ?`

@@ -344,6 +352,11 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`)
             values.push(...groupIds.filter((gid): gid is string => gid !== null))
         }

+        if (docId) {
+            query += ` AND doc_id = ?`
+            values.push(docId)
+        }
+
         query += ';'

         const dataSource = await this.getDataSource()
@@ -136,17 +136,17 @@ class Custom_MCP implements INode {
         }

         let sandbox: ICommonObject = {}
+        const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId

         if (mcpServerConfig.includes('$vars')) {
             const appDataSource = options.appDataSource as DataSource
             const databaseEntities = options.databaseEntities as IDatabaseEntity

-            const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
+            // If options.workspaceId is not set, create a new options object with the workspaceId for getVars.
+            const optionsWithWorkspaceId = options.workspaceId ? options : { ...options, workspaceId }
+            const variables = await getVars(appDataSource, databaseEntities, nodeData, optionsWithWorkspaceId)
             sandbox['$vars'] = prepareSandboxVars(variables)
         }

-        const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId
-
         let canonicalConfig
         try {
             canonicalConfig = JSON.parse(mcpServerConfig)
@ -1,147 +0,0 @@
|
|||
import { z } from 'zod'
|
||||
import path from 'path'
|
||||
import { StructuredTool, ToolParams } from '@langchain/core/tools'
|
||||
import { Serializable } from '@langchain/core/load/serializable'
|
||||
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getUserHome } from '../../../src/utils'
|
||||
import { SecureFileStore, FileSecurityConfig } from '../../../src/SecureFileStore'
|
||||
|
||||
abstract class BaseFileStore extends Serializable {
|
||||
abstract readFile(path: string): Promise<string>
|
||||
abstract writeFile(path: string, contents: string): Promise<void>
|
||||
}
|
||||
|
||||
class ReadFile_Tools implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
warning: string
|
||||
|
||||
constructor() {
|
||||
this.label = 'Read File'
|
||||
this.name = 'readFile'
|
||||
this.version = 2.0
|
||||
this.type = 'ReadFile'
|
||||
this.icon = 'readfile.svg'
|
||||
this.category = 'Tools'
|
||||
this.warning = 'This tool can be used to read files from the disk. It is recommended to use this tool with caution.'
|
||||
this.description = 'Read file from disk'
|
||||
this.baseClasses = [this.type, 'Tool', ...getBaseClasses(ReadFileTool)]
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Workspace Path',
|
||||
name: 'workspacePath',
|
||||
placeholder: `C:\\Users\\User\\MyProject`,
|
||||
type: 'string',
|
||||
description: 'Base workspace directory for file operations. All file paths will be relative to this directory.',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Enforce Workspace Boundaries',
|
||||
name: 'enforceWorkspaceBoundaries',
|
||||
type: 'boolean',
|
||||
description: 'When enabled, restricts file access to the workspace directory for security. Recommended: true',
|
||||
default: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Max File Size (MB)',
|
||||
name: 'maxFileSize',
|
||||
type: 'number',
|
||||
description: 'Maximum file size in megabytes that can be read',
|
||||
default: 10,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Allowed Extensions',
|
||||
name: 'allowedExtensions',
|
||||
type: 'string',
|
||||
description: 'Comma-separated list of allowed file extensions (e.g., .txt,.json,.md). Leave empty to allow all.',
|
||||
placeholder: '.txt,.json,.md,.py,.js',
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const workspacePath = nodeData.inputs?.workspacePath as string
|
||||
const enforceWorkspaceBoundaries = nodeData.inputs?.enforceWorkspaceBoundaries !== false // Default to true
|
||||
const maxFileSize = nodeData.inputs?.maxFileSize as number
|
||||
const allowedExtensions = nodeData.inputs?.allowedExtensions as string
|
||||
|
||||
// Parse allowed extensions
|
||||
const allowedExtensionsList = allowedExtensions ? allowedExtensions.split(',').map((ext) => ext.trim().toLowerCase()) : []
|
||||
|
||||
let store: BaseFileStore
|
||||
|
||||
if (workspacePath) {
|
||||
// Create secure file store with workspace boundaries
|
||||
const config: FileSecurityConfig = {
|
||||
workspacePath,
|
||||
enforceWorkspaceBoundaries,
|
||||
maxFileSize: maxFileSize ? maxFileSize * 1024 * 1024 : undefined, // Convert MB to bytes
|
||||
allowedExtensions: allowedExtensionsList.length > 0 ? allowedExtensionsList : undefined
|
||||
}
|
||||
store = new SecureFileStore(config)
|
||||
} else {
|
||||
// Fallback to current working directory with security warnings
|
||||
if (enforceWorkspaceBoundaries) {
|
||||
const fallbackWorkspacePath = path.join(getUserHome(), '.flowise')
|
||||
console.warn(`[ReadFile] No workspace path specified, using ${fallbackWorkspacePath} with security restrictions`)
|
||||
store = new SecureFileStore({
|
||||
workspacePath: fallbackWorkspacePath,
|
||||
enforceWorkspaceBoundaries: true,
|
||||
maxFileSize: maxFileSize ? maxFileSize * 1024 * 1024 : undefined,
|
||||
allowedExtensions: allowedExtensionsList.length > 0 ? allowedExtensionsList : undefined
|
||||
})
|
||||
} else {
|
||||
console.warn('[ReadFile] SECURITY WARNING: Workspace boundaries disabled - unrestricted file access enabled')
|
||||
store = SecureFileStore.createUnsecure()
|
||||
}
|
||||
}
|
||||
|
||||
return new ReadFileTool({ store })
|
||||
}
|
||||
}
|
||||
|
||||
interface ReadFileParams extends ToolParams {
|
||||
store: BaseFileStore
|
||||
}
|
||||
|
||||
/**
|
||||
* Class for reading files from the disk. Extends the StructuredTool
|
||||
* class.
|
||||
*/
|
||||
export class ReadFileTool extends StructuredTool {
|
||||
static lc_name() {
|
||||
return 'ReadFileTool'
|
||||
}
|
||||
|
||||
schema = z.object({
|
||||
file_path: z.string().describe('name of file')
|
||||
}) as any
|
||||
|
||||
name = 'read_file'
|
||||
|
||||
description = 'Read file from disk'
|
||||
|
||||
store: BaseFileStore
|
||||
|
||||
constructor({ store }: ReadFileParams) {
|
||||
super(...arguments)
|
||||
|
||||
this.store = store
|
||||
}
|
||||
|
||||
async _call({ file_path }: z.infer<typeof this.schema>) {
|
||||
return await this.store.readFile(file_path)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: ReadFile_Tools }
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M18 5H9C7.89543 5 7 5.89543 7 7V25C7 26.1046 7.89543 27 9 27H12M18 5L25 12M18 5V12H25M25 12V25C25 26.1046 24.1046 27 23 27H20" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M16 17V29M16 17L13 20.1361M16 17L19 20.1361" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 455 B |
|
|
@@ -1,149 +0,0 @@
-import { z } from 'zod'
-import path from 'path'
-import { StructuredTool, ToolParams } from '@langchain/core/tools'
-import { Serializable } from '@langchain/core/load/serializable'
-import { INode, INodeData, INodeParams } from '../../../src/Interface'
-import { getBaseClasses, getUserHome } from '../../../src/utils'
-import { SecureFileStore, FileSecurityConfig } from '../../../src/SecureFileStore'
-
-abstract class BaseFileStore extends Serializable {
-    abstract readFile(path: string): Promise<string>
-    abstract writeFile(path: string, contents: string): Promise<void>
-}
-
-class WriteFile_Tools implements INode {
-    label: string
-    name: string
-    version: number
-    description: string
-    type: string
-    icon: string
-    category: string
-    baseClasses: string[]
-    inputs: INodeParams[]
-    warning: string
-
-    constructor() {
-        this.label = 'Write File'
-        this.name = 'writeFile'
-        this.version = 2.0
-        this.type = 'WriteFile'
-        this.icon = 'writefile.svg'
-        this.category = 'Tools'
-        this.warning = 'This tool can be used to write files to the disk. It is recommended to use this tool with caution.'
-        this.description = 'Write file to disk'
-        this.baseClasses = [this.type, 'Tool', ...getBaseClasses(WriteFileTool)]
-        this.inputs = [
-            {
-                label: 'Workspace Path',
-                name: 'workspacePath',
-                placeholder: `C:\Users\User\MyProject`,
-                type: 'string',
-                description: 'Base workspace directory for file operations. All file paths will be relative to this directory.',
-                optional: true
-            },
-            {
-                label: 'Enforce Workspace Boundaries',
-                name: 'enforceWorkspaceBoundaries',
-                type: 'boolean',
-                description: 'When enabled, restricts file access to the workspace directory for security. Recommended: true',
-                default: true,
-                optional: true
-            },
-            {
-                label: 'Max File Size (MB)',
-                name: 'maxFileSize',
-                type: 'number',
-                description: 'Maximum file size in megabytes that can be written',
-                default: 10,
-                optional: true
-            },
-            {
-                label: 'Allowed Extensions',
-                name: 'allowedExtensions',
-                type: 'string',
-                description: 'Comma-separated list of allowed file extensions (e.g., .txt,.json,.md). Leave empty to allow all.',
-                placeholder: '.txt,.json,.md,.py,.js',
-                optional: true
-            }
-        ]
-    }
-
-    async init(nodeData: INodeData): Promise<any> {
-        const workspacePath = nodeData.inputs?.workspacePath as string
-        const enforceWorkspaceBoundaries = nodeData.inputs?.enforceWorkspaceBoundaries !== false // Default to true
-        const maxFileSize = nodeData.inputs?.maxFileSize as number
-        const allowedExtensions = nodeData.inputs?.allowedExtensions as string
-
-        // Parse allowed extensions
-        const allowedExtensionsList = allowedExtensions ? allowedExtensions.split(',').map((ext) => ext.trim().toLowerCase()) : []
-
-        let store: BaseFileStore
-
-        if (workspacePath) {
-            // Create secure file store with workspace boundaries
-            const config: FileSecurityConfig = {
-                workspacePath,
-                enforceWorkspaceBoundaries,
-                maxFileSize: maxFileSize ? maxFileSize * 1024 * 1024 : undefined, // Convert MB to bytes
-                allowedExtensions: allowedExtensionsList.length > 0 ? allowedExtensionsList : undefined
-            }
-            store = new SecureFileStore(config)
-        } else {
-            // Fallback to current working directory with security warnings
-            if (enforceWorkspaceBoundaries) {
-                const fallbackWorkspacePath = path.join(getUserHome(), '.flowise')
-                console.warn(`[WriteFile] No workspace path specified, using ${fallbackWorkspacePath} with security restrictions`)
-                store = new SecureFileStore({
-                    workspacePath: fallbackWorkspacePath,
-                    enforceWorkspaceBoundaries: true,
-                    maxFileSize: maxFileSize ? maxFileSize * 1024 * 1024 : undefined,
-                    allowedExtensions: allowedExtensionsList.length > 0 ? allowedExtensionsList : undefined
-                })
-            } else {
-                console.warn('[WriteFile] SECURITY WARNING: Workspace boundaries disabled - unrestricted file access enabled')
-                store = SecureFileStore.createUnsecure()
-            }
-        }
-
-        return new WriteFileTool({ store })
-    }
-}
-
-interface WriteFileParams extends ToolParams {
-    store: BaseFileStore
-}
-
-/**
- * Class for writing data to files on the disk. Extends the StructuredTool
- * class.
- */
-export class WriteFileTool extends StructuredTool {
-    static lc_name() {
-        return 'WriteFileTool'
-    }
-
-    schema = z.object({
-        file_path: z.string().describe('name of file'),
-        text: z.string().describe('text to write to file')
-    }) as any
-
-    name = 'write_file'
-
-    description = 'Write file to disk'
-
-    store: BaseFileStore
-
-    constructor({ store, ...rest }: WriteFileParams) {
-        super(rest)
-
-        this.store = store
-    }
-
-    async _call({ file_path, text }: z.infer<typeof this.schema>) {
-        await this.store.writeFile(file_path, text)
-        return `File written to ${file_path} successfully.`
-    }
-}
-
-module.exports = { nodeClass: WriteFile_Tools }

@@ -1,4 +0,0 @@
-<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M25 18V25C25 26.1046 24.1046 27 23 27H9C7.89543 27 7 26.1046 7 25V7C7 5.89543 7.89543 5 9 5H18L19 6" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M12 19.3284V22H14.6716C15.202 22 15.7107 21.7893 16.0858 21.4142L24.5858 12.9142C25.3668 12.1332 25.3668 10.8668 24.5858 10.0858L23.9142 9.41421C23.1332 8.63316 21.8668 8.63317 21.0858 9.41421L12.5858 17.9142C12.2107 18.2893 12 18.798 12 19.3284Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
-</svg>
@@ -84,11 +84,16 @@ class CustomFunction_Utilities implements INode {

         const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
         const flow = {
+            input,
             chatflowId: options.chatflowid,
             sessionId: options.sessionId,
             chatId: options.chatId,
-            rawOutput: options.rawOutput || '',
-            input
+            rawOutput: options.postProcessing?.rawOutput || '',
+            chatHistory: options.postProcessing?.chatHistory || [],
+            sourceDocuments: options.postProcessing?.sourceDocuments,
+            usedTools: options.postProcessing?.usedTools,
+            artifacts: options.postProcessing?.artifacts,
+            fileAnnotations: options.postProcessing?.fileAnnotations
         }

         let inputVars: ICommonObject = {}
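A sketch of what a Custom Function body can now read from the injected flow object; outside a post-processing context the postProcessing-derived fields fall back to '' / [] / undefined as set above, and the $flow binding name is assumed from Flowise's sandbox convention:

```typescript
// Inside a Custom Function body (sketch; $flow is injected by the sandbox)
const answer = $flow.rawOutput // raw LLM output when run as a post-processing hook
const turns = $flow.chatHistory.length // [] when no history was passed
return `chat ${$flow.chatId}: ${answer} (${turns} prior messages)`
```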
@@ -186,7 +186,11 @@ class Chroma_VectorStores implements INode {
                 const vectorStoreName = collectionName
                 await recordManager.createSchema()
                 ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-                const keys: string[] = await recordManager.listKeys({})
+                const filterKeys: ICommonObject = {}
+                if (options.docId) {
+                    filterKeys.docId = options.docId
+                }
+                const keys: string[] = await recordManager.listKeys(filterKeys)

                 const chromaStore = new ChromaExtended(embeddings, obj)

@@ -198,7 +198,11 @@ class Elasticsearch_VectorStores implements INode {
                 const vectorStoreName = indexName
                 await recordManager.createSchema()
                 ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-                const keys: string[] = await recordManager.listKeys({})
+                const filterKeys: ICommonObject = {}
+                if (options.docId) {
+                    filterKeys.docId = options.docId
+                }
+                const keys: string[] = await recordManager.listKeys(filterKeys)

                 await vectorStore.delete({ ids: keys })
                 await recordManager.deleteKeys(keys)
@@ -212,7 +212,11 @@ class Pinecone_VectorStores implements INode {
                 const vectorStoreName = pineconeNamespace
                 await recordManager.createSchema()
                 ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-                const keys: string[] = await recordManager.listKeys({})
+                const filterKeys: ICommonObject = {}
+                if (options.docId) {
+                    filterKeys.docId = options.docId
+                }
+                const keys: string[] = await recordManager.listKeys(filterKeys)

                 await pineconeStore.delete({ ids: keys })
                 await recordManager.deleteKeys(keys)
@@ -49,7 +49,7 @@ class Postgres_VectorStores implements INode {
     constructor() {
         this.label = 'Postgres'
         this.name = 'postgres'
-        this.version = 7.0
+        this.version = 7.1
        this.type = 'Postgres'
         this.icon = 'postgres.svg'
         this.category = 'Vector Stores'

@@ -173,6 +173,15 @@ class Postgres_VectorStores implements INode {
                 additionalParams: true,
                 optional: true
             },
+            {
+                label: 'Upsert Batch Size',
+                name: 'batchSize',
+                type: 'number',
+                step: 1,
+                description: 'Upsert in batches of size N',
+                additionalParams: true,
+                optional: true
+            },
             {
                 label: 'Additional Configuration',
                 name: 'additionalConfig',

@@ -232,6 +241,7 @@ class Postgres_VectorStores implements INode {
         const docs = nodeData.inputs?.document as Document[]
         const recordManager = nodeData.inputs?.recordManager
         const isFileUploadEnabled = nodeData.inputs?.fileUpload as boolean
+        const _batchSize = nodeData.inputs?.batchSize
         const vectorStoreDriver: VectorStoreDriver = Postgres_VectorStores.getDriverFromConfig(nodeData, options)

         const flattenDocs = docs && docs.length ? flatten(docs) : []

@@ -265,7 +275,15 @@ class Postgres_VectorStores implements INode {

                 return res
             } else {
-                await vectorStoreDriver.fromDocuments(finalDocs)
+                if (_batchSize) {
+                    const batchSize = parseInt(_batchSize, 10)
+                    for (let i = 0; i < finalDocs.length; i += batchSize) {
+                        const batch = finalDocs.slice(i, i + batchSize)
+                        await vectorStoreDriver.fromDocuments(batch)
+                    }
+                } else {
+                    await vectorStoreDriver.fromDocuments(finalDocs)
+                }

                 return { numAdded: finalDocs.length, addedDocs: finalDocs }
             }

@@ -285,7 +303,11 @@ class Postgres_VectorStores implements INode {
                 const vectorStoreName = tableName
                 await recordManager.createSchema()
                 ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-                const keys: string[] = await recordManager.listKeys({})
+                const filterKeys: ICommonObject = {}
+                if (options.docId) {
+                    filterKeys.docId = options.docId
+                }
+                const keys: string[] = await recordManager.listKeys(filterKeys)

                 await vectorStore.delete({ ids: keys })
                 await recordManager.deleteKeys(keys)
@@ -5,6 +5,11 @@ import { TypeORMVectorStore, TypeORMVectorStoreArgs, TypeORMVectorStoreDocument
 import { VectorStore } from '@langchain/core/vectorstores'
 import { Document } from '@langchain/core/documents'
+import { Pool } from 'pg'
+import { v4 as uuid } from 'uuid'
+
+type TypeORMAddDocumentOptions = {
+    ids?: string[]
+}

 export class TypeORMDriver extends VectorStoreDriver {
     protected _postgresConnectionOptions: DataSourceOptions

@@ -95,15 +100,45 @@ export class TypeORMDriver extends VectorStoreDriver {
                 try {
                     instance.appDataSource.getRepository(instance.documentEntity).delete(ids)
                 } catch (e) {
-                    console.error('Failed to delete')
+                    console.error('Failed to delete', e)
                 }
             }
         }

-        const baseAddVectorsFn = instance.addVectors.bind(instance)
-        instance.addVectors = async (vectors, documents) => {
-            return baseAddVectorsFn(vectors, this.sanitizeDocuments(documents))
+        instance.addVectors = async (
+            vectors: number[][],
+            documents: Document[],
+            documentOptions?: TypeORMAddDocumentOptions
+        ): Promise<void> => {
+            const rows = vectors.map((embedding, idx) => {
+                const embeddingString = `[${embedding.join(',')}]`
+                const documentRow = {
+                    id: documentOptions?.ids?.length ? documentOptions.ids[idx] : uuid(),
+                    pageContent: documents[idx].pageContent,
+                    embedding: embeddingString,
+                    metadata: documents[idx].metadata
+                }
+                return documentRow
+            })
+
+            const documentRepository = instance.appDataSource.getRepository(instance.documentEntity)
+            const _batchSize = this.nodeData.inputs?.batchSize
+            const chunkSize = _batchSize ? parseInt(_batchSize, 10) : 500
+
+            for (let i = 0; i < rows.length; i += chunkSize) {
+                const chunk = rows.slice(i, i + chunkSize)
+                try {
+                    await documentRepository.save(chunk)
+                } catch (e) {
+                    console.error(e)
+                    throw new Error(`Error inserting: ${chunk[0].pageContent}`)
+                }
+            }
         }

+        instance.addDocuments = async (documents: Document[], options?: { ids?: string[] }): Promise<void> => {
+            const texts = documents.map(({ pageContent }) => pageContent)
+            return (instance.addVectors as any)(await this.getEmbeddings().embedDocuments(texts), documents, options)
+        }
+
         return instance
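The rewritten `addVectors` above replaces the default per-row insert with chunked repository saves, defaulting to 500 rows per chunk. A minimal standalone sketch of that batching pattern, assuming a generic async `save` callback (the names here are illustrative, not Flowise or TypeORM APIs):

```typescript
// Sketch of the chunked-save pattern used by the new addVectors override.
// `save` stands in for repository.save(chunk); chunkSize mirrors the 500 default.
async function saveInChunks<T>(rows: T[], save: (chunk: T[]) => Promise<void>, chunkSize = 500): Promise<void> {
    for (let i = 0; i < rows.length; i += chunkSize) {
        const chunk = rows.slice(i, i + chunkSize)
        await save(chunk) // one round-trip per chunk instead of one per row
    }
}
```

Batching keeps memory bounded and turns N single-row inserts into ceil(N / chunkSize) round-trips, the same trade-off the node-level Upsert Batch Size input exposes.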
@@ -385,7 +385,11 @@ class Qdrant_VectorStores implements INode {
             const vectorStoreName = collectionName
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const keys: string[] = await recordManager.listKeys({})
+            const filterKeys: ICommonObject = {}
+            if (options.docId) {
+                filterKeys.docId = options.docId
+            }
+            const keys: string[] = await recordManager.listKeys(filterKeys)

             await vectorStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)
@@ -197,7 +197,11 @@ class Supabase_VectorStores implements INode {
             const vectorStoreName = tableName + '_' + queryName
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const keys: string[] = await recordManager.listKeys({})
+            const filterKeys: ICommonObject = {}
+            if (options.docId) {
+                filterKeys.docId = options.docId
+            }
+            const keys: string[] = await recordManager.listKeys(filterKeys)

             await supabaseStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)
@@ -187,7 +187,11 @@ class Upstash_VectorStores implements INode {
             const vectorStoreName = UPSTASH_VECTOR_REST_URL
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const keys: string[] = await recordManager.listKeys({})
+            const filterKeys: ICommonObject = {}
+            if (options.docId) {
+                filterKeys.docId = options.docId
+            }
+            const keys: string[] = await recordManager.listKeys(filterKeys)

             await upstashStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)
@@ -252,7 +252,11 @@ class Weaviate_VectorStores implements INode {
             const vectorStoreName = weaviateTextKey ? weaviateIndex + '_' + weaviateTextKey : weaviateIndex
             await recordManager.createSchema()
             ;(recordManager as any).namespace = (recordManager as any).namespace + '_' + vectorStoreName
-            const keys: string[] = await recordManager.listKeys({})
+            const filterKeys: ICommonObject = {}
+            if (options.docId) {
+                filterKeys.docId = options.docId
+            }
+            const keys: string[] = await recordManager.listKeys(filterKeys)

             await weaviateStore.delete({ ids: keys })
             await recordManager.deleteKeys(keys)
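The same four-line change recurs across the Chroma, Elasticsearch, Pinecone, Postgres, Qdrant, Supabase, Upstash, and Weaviate nodes above: when `options.docId` is set, only the record-manager keys for that document are listed and deleted. A rough sketch of the shared shape, assuming a LangChain-style record manager whose `listKeys` accepts a filter object:

```typescript
// Shared shape of the scoped-delete change; types are kept loose for illustration.
async function deleteFromStore(
    recordManager: { listKeys: (f: Record<string, any>) => Promise<string[]>; deleteKeys: (k: string[]) => Promise<void> },
    vectorStore: { delete: (args: { ids: string[] }) => Promise<void> },
    options: { docId?: string }
): Promise<void> {
    const filterKeys: Record<string, any> = {}
    if (options.docId) {
        filterKeys.docId = options.docId // narrow deletion to one document
    }
    const keys = await recordManager.listKeys(filterKeys) // an empty filter still means "all keys"
    await vectorStore.delete({ ids: keys })
    await recordManager.deleteKeys(keys)
}
```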
@@ -1,6 +1,6 @@
 {
     "name": "flowise-components",
-    "version": "3.0.10",
+    "version": "3.0.11",
     "description": "Flowiseai Components",
     "main": "dist/src/index",
     "types": "dist/src/index.d.ts",

@@ -42,7 +42,8 @@
     "@google-ai/generativelanguage": "^2.5.0",
     "@google-cloud/storage": "^7.15.2",
     "@google/generative-ai": "^0.24.0",
-    "@huggingface/inference": "^2.6.1",
+    "@grpc/grpc-js": "^1.10.10",
+    "@huggingface/inference": "^4.13.2",
     "@langchain/anthropic": "0.3.33",
     "@langchain/aws": "^0.1.11",
     "@langchain/baidu-qianfan": "^0.1.0",

@@ -73,6 +74,20 @@
     "@modelcontextprotocol/server-slack": "^2025.1.17",
     "@notionhq/client": "^2.2.8",
     "@opensearch-project/opensearch": "^1.2.0",
+    "@opentelemetry/api": "1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.52.0",
+    "@opentelemetry/core": "1.27.0",
+    "@opentelemetry/exporter-metrics-otlp-grpc": "0.54.0",
+    "@opentelemetry/exporter-metrics-otlp-http": "0.54.0",
+    "@opentelemetry/exporter-metrics-otlp-proto": "0.54.0",
+    "@opentelemetry/exporter-trace-otlp-grpc": "0.54.0",
+    "@opentelemetry/exporter-trace-otlp-http": "0.54.0",
+    "@opentelemetry/exporter-trace-otlp-proto": "0.54.0",
+    "@opentelemetry/resources": "1.27.0",
+    "@opentelemetry/sdk-metrics": "1.27.0",
+    "@opentelemetry/sdk-node": "^0.54.0",
+    "@opentelemetry/sdk-trace-base": "1.27.0",
+    "@opentelemetry/semantic-conventions": "1.27.0",
     "@pinecone-database/pinecone": "4.0.0",
     "@qdrant/js-client-rest": "^1.9.0",
     "@stripe/agent-toolkit": "^0.1.20",
@@ -1,167 +0,0 @@
-import { Serializable } from '@langchain/core/load/serializable'
-import { NodeFileStore } from 'langchain/stores/file/node'
-import { isUnsafeFilePath, isWithinWorkspace } from './validator'
-import * as path from 'path'
-import * as fs from 'fs'
-
-/**
- * Security configuration for file operations
- */
-export interface FileSecurityConfig {
-    /** Base workspace path - all file operations are restricted to this directory */
-    workspacePath: string
-    /** Whether to enforce workspace boundaries (default: true) */
-    enforceWorkspaceBoundaries?: boolean
-    /** Maximum file size in bytes (default: 10MB) */
-    maxFileSize?: number
-    /** Allowed file extensions (if empty, all extensions allowed) */
-    allowedExtensions?: string[]
-    /** Blocked file extensions */
-    blockedExtensions?: string[]
-}
-
-/**
- * Secure file store that enforces workspace boundaries and validates file operations
- */
-export class SecureFileStore extends Serializable {
-    lc_namespace = ['flowise', 'components', 'stores', 'file']
-
-    private config: Required<FileSecurityConfig>
-    private nodeFileStore: NodeFileStore
-
-    constructor(config: FileSecurityConfig) {
-        super()
-
-        // Set default configuration
-        this.config = {
-            workspacePath: config.workspacePath,
-            enforceWorkspaceBoundaries: config.enforceWorkspaceBoundaries ?? true,
-            maxFileSize: config.maxFileSize ?? 10 * 1024 * 1024, // 10MB default
-            allowedExtensions: config.allowedExtensions ?? [],
-            blockedExtensions: config.blockedExtensions ?? [
-                '.exe',
-                '.bat',
-                '.cmd',
-                '.sh',
-                '.ps1',
-                '.vbs',
-                '.scr',
-                '.com',
-                '.pif',
-                '.dll',
-                '.sys',
-                '.msi',
-                '.jar'
-            ]
-        }
-
-        // Validate workspace path
-        if (!this.config.workspacePath || !path.isAbsolute(this.config.workspacePath)) {
-            throw new Error('Workspace path must be an absolute path')
-        }
-
-        // Ensure workspace directory exists
-        if (!fs.existsSync(this.config.workspacePath)) {
-            throw new Error(`Workspace directory does not exist: ${this.config.workspacePath}`)
-        }
-
-        // Initialize the underlying NodeFileStore with workspace path
-        this.nodeFileStore = new NodeFileStore(this.config.workspacePath)
-    }
-
-    /**
-     * Validates a file path against security policies
-     */
-    private validateFilePath(filePath: string): void {
-        // Check for unsafe path patterns
-        if (isUnsafeFilePath(filePath)) {
-            throw new Error(`Unsafe file path detected: ${filePath}`)
-        }
-
-        // Enforce workspace boundaries if enabled
-        if (this.config.enforceWorkspaceBoundaries) {
-            if (!isWithinWorkspace(filePath, this.config.workspacePath)) {
-                throw new Error(`File path outside workspace boundaries: ${filePath}`)
-            }
-        }
-
-        // Check file extension
-        const ext = path.extname(filePath).toLowerCase()
-
-        // Check blocked extensions
-        if (this.config.blockedExtensions.includes(ext)) {
-            throw new Error(`File extension not allowed: ${ext}`)
-        }
-
-        // Check allowed extensions (if specified)
-        if (this.config.allowedExtensions.length > 0 && !this.config.allowedExtensions.includes(ext)) {
-            throw new Error(`File extension not in allowed list: ${ext}`)
-        }
-    }
-
-    /**
-     * Validates file size
-     */
-    private validateFileSize(content: string): void {
-        const sizeInBytes = Buffer.byteLength(content, 'utf8')
-        if (sizeInBytes > this.config.maxFileSize) {
-            throw new Error(`File size exceeds maximum allowed size: ${sizeInBytes} > ${this.config.maxFileSize}`)
-        }
-    }
-
-    /**
-     * Reads a file with security validation
-     */
-    async readFile(filePath: string): Promise<string> {
-        this.validateFilePath(filePath)
-
-        try {
-            return await this.nodeFileStore.readFile(filePath)
-        } catch (error) {
-            // Provide generic error message to avoid information leakage
-            throw new Error(`Failed to read file: ${path.basename(filePath)}`)
-        }
-    }
-
-    /**
-     * Writes a file with security validation
-     */
-    async writeFile(filePath: string, contents: string): Promise<void> {
-        this.validateFilePath(filePath)
-        this.validateFileSize(contents)
-
-        try {
-            // Ensure the directory exists
-            const dir = path.dirname(path.resolve(this.config.workspacePath, filePath))
-            if (!fs.existsSync(dir)) {
-                fs.mkdirSync(dir, { recursive: true })
-            }
-
-            await this.nodeFileStore.writeFile(filePath, contents)
-        } catch (error) {
-            // Provide generic error message to avoid information leakage
-            throw new Error(`Failed to write file: ${path.basename(filePath)}`)
-        }
-    }
-
-    /**
-     * Gets the workspace configuration
-     */
-    getConfig(): Readonly<Required<FileSecurityConfig>> {
-        return { ...this.config }
-    }
-
-    /**
-     * Creates a secure file store with workspace enforcement disabled (for backward compatibility)
-     * WARNING: This should only be used when absolutely necessary and with proper user consent
-     */
-    static createUnsecure(basePath?: string): SecureFileStore {
-        const workspacePath = basePath || process.cwd()
-        return new SecureFileStore({
-            workspacePath,
-            enforceWorkspaceBoundaries: false,
-            maxFileSize: 50 * 1024 * 1024, // 50MB for insecure mode
-            blockedExtensions: [] // No extension restrictions in insecure mode
-        })
-    }
-}
@@ -1774,7 +1774,7 @@ export class AnalyticHandler {
         }

         if (Object.prototype.hasOwnProperty.call(this.handlers, 'lunary')) {
-            const toolEventId: string = this.handlers['lunary'].llmEvent[returnIds['lunary'].toolEvent]
+            const toolEventId: string = this.handlers['lunary'].toolEvent[returnIds['lunary'].toolEvent]
             const monitor = this.handlers['lunary'].client

             if (monitor && toolEventId) {
@@ -8,6 +8,10 @@ import { IndexingResult } from './Interface'

 type Metadata = Record<string, unknown>

+export interface ExtendedRecordManagerInterface extends RecordManagerInterface {
+    update(keys: Array<{ uid: string; docId: string }> | string[], updateOptions?: Record<string, any>): Promise<void>
+}
+
 type StringOrDocFunc = string | ((doc: DocumentInterface) => string)

 export interface HashedDocumentInterface extends DocumentInterface {

@@ -207,7 +211,7 @@ export const _isBaseDocumentLoader = (arg: any): arg is BaseDocumentLoader => {

 interface IndexArgs {
     docsSource: BaseDocumentLoader | DocumentInterface[]
-    recordManager: RecordManagerInterface
+    recordManager: ExtendedRecordManagerInterface
     vectorStore: VectorStore
     options?: IndexOptions
 }

@@ -275,7 +279,7 @@ export async function index(args: IndexArgs): Promise<IndexingResult> {

     const uids: string[] = []
     const docsToIndex: DocumentInterface[] = []
-    const docsToUpdate: string[] = []
+    const docsToUpdate: Array<{ uid: string; docId: string }> = []
     const seenDocs = new Set<string>()
     hashedDocs.forEach((hashedDoc, i) => {
         const docExists = batchExists[i]

@@ -283,7 +287,7 @@ export async function index(args: IndexArgs): Promise<IndexingResult> {
         if (forceUpdate) {
             seenDocs.add(hashedDoc.uid)
         } else {
-            docsToUpdate.push(hashedDoc.uid)
+            docsToUpdate.push({ uid: hashedDoc.uid, docId: hashedDoc.metadata.docId as string })
             return
         }
     }

@@ -308,7 +312,7 @@ export async function index(args: IndexArgs): Promise<IndexingResult> {
     }

     await recordManager.update(
-        hashedDocs.map((doc) => doc.uid),
+        hashedDocs.map((doc) => ({ uid: doc.uid, docId: doc.metadata.docId as string })),
         { timeAtLeast: indexStartDt, groupIds: sourceIds }
     )
@@ -8,6 +8,7 @@ import { cloneDeep, omit, get } from 'lodash'
 import TurndownService from 'turndown'
 import { DataSource, Equal } from 'typeorm'
 import { ICommonObject, IDatabaseEntity, IFileUpload, IMessage, INodeData, IVariable, MessageContentImageUrl } from './Interface'
 import { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import { AES, enc } from 'crypto-js'
 import { AIMessage, HumanMessage, BaseMessage } from '@langchain/core/messages'
+import { Document } from '@langchain/core/documents'

@@ -1500,9 +1501,29 @@ export const executeJavaScriptCode = async (

     const sbx = await Sandbox.create({ apiKey: process.env.E2B_APIKEY, timeoutMs })

+    // Determine which libraries to install
+    const librariesToInstall = new Set<string>(libraries)
+
+    // Auto-detect required libraries from code
+    // Extract required modules from import/require statements
+    const importRegex = /(?:import\s+.*?\s+from\s+['"]([^'"]+)['"]|require\s*\(\s*['"]([^'"]+)['"]\s*\))/g
+    let match
+    while ((match = importRegex.exec(code)) !== null) {
+        const moduleName = match[1] || match[2]
+        // Extract base module name (e.g., 'typeorm' from 'typeorm/something')
+        const baseModuleName = moduleName.split('/')[0]
+        librariesToInstall.add(baseModuleName)
+    }
+
     // Install libraries
-    for (const library of libraries) {
-        await sbx.commands.run(`npm install ${library}`)
+    for (const library of librariesToInstall) {
+        // Validate library name to prevent command injection.
+        const validPackageNameRegex = /^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*$/
+        if (validPackageNameRegex.test(library)) {
+            await sbx.commands.run(`npm install ${library}`)
+        } else {
+            console.warn(`[Sandbox] Skipping installation of invalid module: ${library}`)
+        }
     }

     // Separate imports from the rest of the code for proper ES6 module structure
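The validation regex above approximates npm's package-name rules (an optional lowercase scope, then URL-safe lowercase characters), so shell metacharacters in an auto-detected module name can never reach `npm install`. A quick illustration of how it classifies inputs:

```typescript
// How the package-name check classifies inputs (illustrative).
const validPackageNameRegex = /^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*$/

console.log(validPackageNameRegex.test('typeorm'))          // true
console.log(validPackageNameRegex.test('@langchain/core'))  // true (scoped package)
console.log(validPackageNameRegex.test('lodash; rm -rf /')) // false: skipped, never installed
```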
@@ -1921,3 +1942,160 @@ export async function parseWithTypeConversion<T extends z.ZodTypeAny>(schema: T,
         throw e
     }
 }
+
+/**
+ * Configures structured output for the LLM using Zod schema
+ * @param {BaseChatModel} llmNodeInstance - The LLM instance to configure
+ * @param {any[]} structuredOutput - Array of structured output schema definitions
+ * @returns {BaseChatModel} - The configured LLM instance
+ */
+export const configureStructuredOutput = (llmNodeInstance: BaseChatModel, structuredOutput: any[]): BaseChatModel => {
+    try {
+        const zodObj: ICommonObject = {}
+        for (const sch of structuredOutput) {
+            if (sch.type === 'string') {
+                zodObj[sch.key] = z.string().describe(sch.description || '')
+            } else if (sch.type === 'stringArray') {
+                zodObj[sch.key] = z.array(z.string()).describe(sch.description || '')
+            } else if (sch.type === 'number') {
+                zodObj[sch.key] = z.number().describe(sch.description || '')
+            } else if (sch.type === 'boolean') {
+                zodObj[sch.key] = z.boolean().describe(sch.description || '')
+            } else if (sch.type === 'enum') {
+                const enumValues = sch.enumValues?.split(',').map((item: string) => item.trim()) || []
+                zodObj[sch.key] = z
+                    .enum(enumValues.length ? (enumValues as [string, ...string[]]) : ['default'])
+                    .describe(sch.description || '')
+            } else if (sch.type === 'jsonArray') {
+                const jsonSchema = sch.jsonSchema
+                if (jsonSchema) {
+                    try {
+                        // Parse the JSON schema
+                        const schemaObj = JSON.parse(jsonSchema)
+
+                        // Create a Zod schema from the JSON schema
+                        const itemSchema = createZodSchemaFromJSON(schemaObj)
+
+                        // Create an array schema of the item schema
+                        zodObj[sch.key] = z.array(itemSchema).describe(sch.description || '')
+                    } catch (err) {
+                        console.error(`Error parsing JSON schema for ${sch.key}:`, err)
+                        // Fallback to generic array of records
+                        zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '')
+                    }
+                } else {
+                    // If no schema provided, use generic array of records
+                    zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '')
+                }
+            }
+        }
+        const structuredOutputSchema = z.object(zodObj)
+
+        // @ts-ignore
+        return llmNodeInstance.withStructuredOutput(structuredOutputSchema)
+    } catch (exception) {
+        console.error(exception)
+        return llmNodeInstance
+    }
+}
+
+/**
+ * Creates a Zod schema from a JSON schema object
+ * @param {any} jsonSchema - The JSON schema object
+ * @returns {z.ZodTypeAny} - A Zod schema
+ */
+export const createZodSchemaFromJSON = (jsonSchema: any): z.ZodTypeAny => {
+    // If the schema is an object with properties, create an object schema
+    if (typeof jsonSchema === 'object' && jsonSchema !== null) {
+        const schemaObj: Record<string, z.ZodTypeAny> = {}
+
+        // Process each property in the schema
+        for (const [key, value] of Object.entries(jsonSchema)) {
+            if (value === null) {
+                // Handle null values
+                schemaObj[key] = z.null()
+            } else if (typeof value === 'object' && !Array.isArray(value)) {
+                // Check if the property has a type definition
+                if ('type' in value) {
+                    const type = value.type as string
+                    const description = ('description' in value ? (value.description as string) : '') || ''
+
+                    // Create the appropriate Zod type based on the type property
+                    if (type === 'string') {
+                        schemaObj[key] = z.string().describe(description)
+                    } else if (type === 'number') {
+                        schemaObj[key] = z.number().describe(description)
+                    } else if (type === 'boolean') {
+                        schemaObj[key] = z.boolean().describe(description)
+                    } else if (type === 'array') {
+                        // If it's an array type, check if items is defined
+                        if ('items' in value && value.items) {
+                            const itemSchema = createZodSchemaFromJSON(value.items)
+                            schemaObj[key] = z.array(itemSchema).describe(description)
+                        } else {
+                            // Default to array of any if items not specified
+                            schemaObj[key] = z.array(z.any()).describe(description)
+                        }
+                    } else if (type === 'object') {
+                        // If it's an object type, check if properties is defined
+                        if ('properties' in value && value.properties) {
+                            const nestedSchema = createZodSchemaFromJSON(value.properties)
+                            schemaObj[key] = nestedSchema.describe(description)
+                        } else {
+                            // Default to record of any if properties not specified
+                            schemaObj[key] = z.record(z.any()).describe(description)
+                        }
+                    } else {
+                        // Default to any for unknown types
+                        schemaObj[key] = z.any().describe(description)
+                    }
+
+                    // Check if the property is optional
+                    if ('optional' in value && value.optional === true) {
+                        schemaObj[key] = schemaObj[key].optional()
+                    }
+                } else if (Array.isArray(value)) {
+                    // Array values without a type property
+                    if (value.length > 0) {
+                        // If the array has items, recursively create a schema for the first item
+                        const itemSchema = createZodSchemaFromJSON(value[0])
+                        schemaObj[key] = z.array(itemSchema)
+                    } else {
+                        // Empty array, allow any array
+                        schemaObj[key] = z.array(z.any())
+                    }
+                } else {
+                    // It's a nested object without a type property, recursively create schema
+                    schemaObj[key] = createZodSchemaFromJSON(value)
+                }
+            } else if (Array.isArray(value)) {
+                // Array values
+                if (value.length > 0) {
+                    // If the array has items, recursively create a schema for the first item
+                    const itemSchema = createZodSchemaFromJSON(value[0])
+                    schemaObj[key] = z.array(itemSchema)
+                } else {
+                    // Empty array, allow any array
+                    schemaObj[key] = z.array(z.any())
+                }
+            } else {
+                // For primitive values (which shouldn't be in the schema directly)
+                // Use the corresponding Zod type
+                if (typeof value === 'string') {
+                    schemaObj[key] = z.string()
+                } else if (typeof value === 'number') {
+                    schemaObj[key] = z.number()
+                } else if (typeof value === 'boolean') {
+                    schemaObj[key] = z.boolean()
+                } else {
+                    schemaObj[key] = z.any()
+                }
+            }
+        }
+
+        return z.object(schemaObj)
+    }
+
+    // Fallback to any for unknown types
+    return z.any()
+}
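A short usage sketch of `createZodSchemaFromJSON`; the input shape is an assumption based on the branches above (per-property descriptors with `type`, optional `description`, and optional `optional`):

```typescript
// Hypothetical descriptor exercising the string/number/boolean branches above.
const itemDescriptor = {
    name: { type: 'string', description: 'Product name' },
    price: { type: 'number', description: 'Unit price' },
    inStock: { type: 'boolean', optional: true }
}

const itemSchema = createZodSchemaFromJSON(itemDescriptor)
// Roughly z.object({ name: z.string(), price: z.number(), inStock: z.boolean().optional() })
itemSchema.parse({ name: 'Widget', price: 9.99 }) // passes; inStock may be omitted
```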
@@ -69,36 +69,3 @@ export const isUnsafeFilePath = (filePath: string): boolean => {

     return dangerousPatterns.some((pattern) => pattern.test(filePath))
 }
-
-/**
- * Validates if a file path is within the allowed workspace boundaries
- * @param {string} filePath The file path to validate
- * @param {string} workspacePath The workspace base path
- * @returns {boolean} True if path is within workspace, false otherwise
- */
-export const isWithinWorkspace = (filePath: string, workspacePath: string): boolean => {
-    if (!filePath || !workspacePath) {
-        return false
-    }
-
-    try {
-        const path = require('path')
-
-        // Resolve both paths to absolute paths
-        const resolvedFilePath = path.resolve(workspacePath, filePath)
-        const resolvedWorkspacePath = path.resolve(workspacePath)
-
-        // Normalize paths to handle different separators
-        const normalizedFilePath = path.normalize(resolvedFilePath)
-        const normalizedWorkspacePath = path.normalize(resolvedWorkspacePath)
-
-        // Check if the file path starts with the workspace path
-        const relativePath = path.relative(normalizedWorkspacePath, normalizedFilePath)
-
-        // If relative path starts with '..' or is absolute, it's outside workspace
-        return !relativePath.startsWith('..') && !path.isAbsolute(relativePath)
-    } catch (error) {
-        // If any error occurs during path resolution, deny access
-        return false
-    }
-}
@@ -41,7 +41,7 @@ cd Flowise/packages/server
 pnpm install
 ./node_modules/.bin/cypress install
 pnpm build
-#Only for writting new tests on local dev -> pnpm run cypress:open
+#Only for writing new tests on local dev -> pnpm run cypress:open
 pnpm run e2e
 ```
@@ -284,7 +284,7 @@
             "inputAnchors": [],
             "inputs": {
                 "customFunctionInputVariables": "",
-                "customFunctionJavascriptFunction": "const { DataSource } = require('typeorm');\n\nconst HOST = 'localhost';\nconst USER = 'testuser';\nconst PASSWORD = 'testpwd';\nconst DATABASE = 'abudhabi';\nconst PORT = 5555;\n\nlet sqlSchemaPrompt = '';\n\nconst AppDataSource = new DataSource({\n type: 'postgres',\n host: HOST,\n port: PORT,\n username: USER,\n password: PASSWORD,\n database: DATABASE,\n synchronize: false,\n logging: false,\n});\n\nasync function getSQLPrompt() {\n try {\n await AppDataSource.initialize();\n const queryRunner = AppDataSource.createQueryRunner();\n\n // Get all user-defined tables (excluding system tables)\n const tablesResult = await queryRunner.query(`\n SELECT table_name\n FROM information_schema.tables\n WHERE table_schema = 'public' AND table_type = 'BASE TABLE'\n `);\n\n for (const tableRow of tablesResult) {\n const tableName = tableRow.table_name;\n\n const schemaInfo = await queryRunner.query(`\n SELECT column_name, data_type, is_nullable\n FROM information_schema.columns\n WHERE table_name = '${tableName}'\n `);\n\n const createColumns = [];\n const columnNames = [];\n\n for (const column of schemaInfo) {\n const name = column.column_name;\n const type = column.data_type.toUpperCase();\n const notNull = column.is_nullable === 'NO' ? 'NOT NULL' : '';\n columnNames.push(name);\n createColumns.push(`${name} ${type} ${notNull}`);\n }\n\n const sqlCreateTableQuery = `CREATE TABLE ${tableName} (${createColumns.join(', ')})`;\n const sqlSelectTableQuery = `SELECT * FROM ${tableName} LIMIT 3`;\n\n let allValues = [];\n try {\n const rows = await queryRunner.query(sqlSelectTableQuery);\n\n allValues = rows.map(row =>\n columnNames.map(col => row[col]).join(' ')\n );\n } catch (err) {\n allValues.push('[ERROR FETCHING ROWS]');\n }\n\n sqlSchemaPrompt +=\n sqlCreateTableQuery +\n '\\n' +\n sqlSelectTableQuery +\n '\\n' +\n columnNames.join(' ') +\n '\\n' +\n allValues.join('\\n') +\n '\\n\\n';\n }\n\n await queryRunner.release();\n } catch (err) {\n console.error(err);\n throw err;\n }\n}\n\nasync function main() {\n await getSQLPrompt();\n}\n\nawait main();\n\nreturn sqlSchemaPrompt;\n",
+                "customFunctionJavascriptFunction": "const { DataSource } = require('typeorm');\nconst { Pool } = require('pg');\n\nconst HOST = 'localhost';\nconst USER = 'testuser';\nconst PASSWORD = 'testpwd';\nconst DATABASE = 'abudhabi';\nconst PORT = 5555;\n\nlet sqlSchemaPrompt = '';\n\nconst AppDataSource = new DataSource({\n type: 'postgres',\n host: HOST,\n port: PORT,\n username: USER,\n password: PASSWORD,\n database: DATABASE,\n synchronize: false,\n logging: false,\n});\n\nasync function getSQLPrompt() {\n try {\n await AppDataSource.initialize();\n const queryRunner = AppDataSource.createQueryRunner();\n\n // Get all user-defined tables (excluding system tables)\n const tablesResult = await queryRunner.query(`\n SELECT table_name\n FROM information_schema.tables\n WHERE table_schema = 'public' AND table_type = 'BASE TABLE'\n `);\n\n for (const tableRow of tablesResult) {\n const tableName = tableRow.table_name;\n\n const schemaInfo = await queryRunner.query(`\n SELECT column_name, data_type, is_nullable\n FROM information_schema.columns\n WHERE table_name = '${tableName}'\n `);\n\n const createColumns = [];\n const columnNames = [];\n\n for (const column of schemaInfo) {\n const name = column.column_name;\n const type = column.data_type.toUpperCase();\n const notNull = column.is_nullable === 'NO' ? 'NOT NULL' : '';\n columnNames.push(name);\n createColumns.push(`${name} ${type} ${notNull}`);\n }\n\n const sqlCreateTableQuery = `CREATE TABLE ${tableName} (${createColumns.join(', ')})`;\n const sqlSelectTableQuery = `SELECT * FROM ${tableName} LIMIT 3`;\n\n let allValues = [];\n try {\n const rows = await queryRunner.query(sqlSelectTableQuery);\n\n allValues = rows.map(row =>\n columnNames.map(col => row[col]).join(' ')\n );\n } catch (err) {\n allValues.push('[ERROR FETCHING ROWS]');\n }\n\n sqlSchemaPrompt +=\n sqlCreateTableQuery +\n '\\n' +\n sqlSelectTableQuery +\n '\\n' +\n columnNames.join(' ') +\n '\\n' +\n allValues.join('\\n') +\n '\\n\\n';\n }\n\n await queryRunner.release();\n } catch (err) {\n console.error(err);\n throw err;\n }\n}\n\nasync function main() {\n await getSQLPrompt();\n}\n\nawait main();\n\nreturn sqlSchemaPrompt;\n",
                 "customFunctionUpdateState": ""
             },
             "outputAnchors": [

@@ -913,7 +913,7 @@
                 "variableValue": "<p><span class=\"variable\" data-type=\"mention\" data-id=\"$flow.state.sqlQuery\" data-label=\"$flow.state.sqlQuery\">{{ $flow.state.sqlQuery }}</span> </p>"
             }
         ],
-        "customFunctionJavascriptFunction": "const { DataSource } = require('typeorm');\n\n// Configuration\nconst HOST = 'localhost';\nconst USER = 'testuser';\nconst PASSWORD = 'testpwd';\nconst DATABASE = 'abudhabi';\nconst PORT = 5555;\n\nconst sqlQuery = $sqlQuery;\n\nconst AppDataSource = new DataSource({\n type: 'postgres',\n host: HOST,\n port: PORT,\n username: USER,\n password: PASSWORD,\n database: DATABASE,\n synchronize: false,\n logging: false,\n});\n\nlet formattedResult = '';\n\nasync function runSQLQuery(query) {\n try {\n await AppDataSource.initialize();\n const queryRunner = AppDataSource.createQueryRunner();\n\n const rows = await queryRunner.query(query);\n console.log('rows =', rows);\n\n if (rows.length === 0) {\n formattedResult = '[No results returned]';\n } else {\n const columnNames = Object.keys(rows[0]);\n const header = columnNames.join(' ');\n const values = rows.map(row =>\n columnNames.map(col => row[col]).join(' ')\n );\n\n formattedResult = query + '\\n' + header + '\\n' + values.join('\\n');\n }\n\n await queryRunner.release();\n } catch (err) {\n console.error('[ERROR]', err);\n formattedResult = `[Error executing query]: ${err}`;\n }\n\n return formattedResult;\n}\n\nasync function main() {\n formattedResult = await runSQLQuery(sqlQuery);\n}\n\nawait main();\n\nreturn formattedResult;\n",
+        "customFunctionJavascriptFunction": "const { DataSource } = require('typeorm');\nconst { Pool } = require('pg');\n\n// Configuration\nconst HOST = 'localhost';\nconst USER = 'testuser';\nconst PASSWORD = 'testpwd';\nconst DATABASE = 'abudhabi';\nconst PORT = 5555;\n\nconst sqlQuery = $sqlQuery;\n\nconst AppDataSource = new DataSource({\n type: 'postgres',\n host: HOST,\n port: PORT,\n username: USER,\n password: PASSWORD,\n database: DATABASE,\n synchronize: false,\n logging: false,\n});\n\nlet formattedResult = '';\n\nasync function runSQLQuery(query) {\n try {\n await AppDataSource.initialize();\n const queryRunner = AppDataSource.createQueryRunner();\n\n const rows = await queryRunner.query(query);\n console.log('rows =', rows);\n\n if (rows.length === 0) {\n formattedResult = '[No results returned]';\n } else {\n const columnNames = Object.keys(rows[0]);\n const header = columnNames.join(' ');\n const values = rows.map(row =>\n columnNames.map(col => row[col]).join(' ')\n );\n\n formattedResult = query + '\\n' + header + '\\n' + values.join('\\n');\n }\n\n await queryRunner.release();\n } catch (err) {\n console.error('[ERROR]', err);\n formattedResult = `[Error executing query]: ${err}`;\n }\n\n return formattedResult;\n}\n\nasync function main() {\n formattedResult = await runSQLQuery(sqlQuery);\n}\n\nawait main();\n\nreturn formattedResult;\n",
         "customFunctionUpdateState": ""
     },
     "outputAnchors": [
@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "3.0.10",
+    "version": "3.0.11",
     "description": "Flowiseai Server",
     "main": "dist/index",
     "types": "dist/index.d.ts",

@@ -66,7 +66,7 @@
     "@google-cloud/logging-winston": "^6.0.0",
     "@keyv/redis": "^4.2.0",
     "@oclif/core": "4.0.7",
-    "@opentelemetry/api": "^1.3.0",
+    "@opentelemetry/api": "1.9.0",
     "@opentelemetry/auto-instrumentations-node": "^0.52.0",
     "@opentelemetry/core": "1.27.0",
     "@opentelemetry/exporter-metrics-otlp-grpc": "0.54.0",

@@ -119,12 +119,12 @@
     "lodash": "^4.17.21",
     "moment": "^2.29.3",
     "moment-timezone": "^0.5.34",
-    "multer": "^1.4.5-lts.1",
+    "multer": "^2.0.2",
     "multer-cloud-storage": "^4.0.0",
     "multer-s3": "^3.0.1",
     "mysql2": "^3.11.3",
     "nanoid": "3",
-    "nodemailer": "^6.9.14",
+    "nodemailer": "^7.0.7",
     "openai": "^4.96.0",
     "passport": "^0.7.0",
     "passport-auth0": "^1.4.4",
@@ -37,7 +37,19 @@ export class UsageCacheManager {
         if (process.env.MODE === MODE.QUEUE) {
             let redisConfig: string | Record<string, any>
             if (process.env.REDIS_URL) {
-                redisConfig = process.env.REDIS_URL
+                redisConfig = {
+                    url: process.env.REDIS_URL,
+                    socket: {
+                        keepAlive:
+                            process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10))
+                                ? parseInt(process.env.REDIS_KEEP_ALIVE, 10)
+                                : undefined
+                    },
+                    pingInterval:
+                        process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10))
+                            ? parseInt(process.env.REDIS_KEEP_ALIVE, 10)
+                            : undefined
+                }
             } else {
                 redisConfig = {
                     username: process.env.REDIS_USERNAME || undefined,

@@ -48,8 +60,16 @@ export class UsageCacheManager {
                     tls: process.env.REDIS_TLS === 'true',
                     cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined,
                     key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined,
-                    ca: process.env.REDIS_CA ? Buffer.from(process.env.REDIS_CA, 'base64') : undefined
-                }
+                    ca: process.env.REDIS_CA ? Buffer.from(process.env.REDIS_CA, 'base64') : undefined,
+                    keepAlive:
+                        process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10))
+                            ? parseInt(process.env.REDIS_KEEP_ALIVE, 10)
+                            : undefined
+                },
+                pingInterval:
+                    process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10))
+                        ? parseInt(process.env.REDIS_KEEP_ALIVE, 10)
+                        : undefined
             }
         }
         this.cache = createCache({
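Both branches derive the keep-alive value the same way, so the behaviour can be summarised in one helper; this is a sketch of the parsing logic above, not code from the PR:

```typescript
// Mirrors the REDIS_KEEP_ALIVE handling above: a numeric env value becomes the
// keepAlive/pingInterval setting, anything unset or non-numeric stays undefined.
const parseKeepAlive = (raw: string | undefined): number | undefined => {
    if (!raw || isNaN(parseInt(raw, 10))) return undefined
    return parseInt(raw, 10)
}

// e.g. REDIS_KEEP_ALIVE=30000
const keepAlive = parseKeepAlive(process.env.REDIS_KEEP_ALIVE)
```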
@@ -465,9 +465,10 @@ const insertIntoVectorStore = async (req: Request, res: Response, next: NextFunc
         }
         const subscriptionId = req.user?.activeOrganizationSubscriptionId || ''
         const body = req.body
+        const isStrictSave = body.isStrictSave ?? false
         const apiResponse = await documentStoreService.insertIntoVectorStoreMiddleware(
             body,
-            false,
+            isStrictSave,
             orgId,
             workspaceId,
             subscriptionId,

@@ -513,7 +514,11 @@ const deleteVectorStoreFromStore = async (req: Request, res: Response, next: Nex
                 `Error: documentStoreController.deleteVectorStoreFromStore - workspaceId not provided!`
             )
         }
-        const apiResponse = await documentStoreService.deleteVectorStoreFromStore(req.params.storeId, workspaceId)
+        const apiResponse = await documentStoreService.deleteVectorStoreFromStore(
+            req.params.storeId,
+            workspaceId,
+            (req.query.docId as string) || undefined
+        )
        return res.json(apiResponse)
     } catch (error) {
         next(error)
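With the controller now reading `req.query.docId`, a client can scope vector-store cleanup to a single document. A hedged sketch of the call; the `/api/v1` prefix and any auth handling are assumptions, only the route shape and query parameter come from this diff:

```typescript
// Scoped delete: remove only the vectors upserted for one document (sketch).
const storeId = 'my-store-id' // placeholder
const docId = 'my-doc-id'     // placeholder
await fetch(`/api/v1/document-store/vectorstore/${storeId}?docId=${docId}`, { method: 'DELETE' })
```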
@@ -0,0 +1,14 @@
+import { MigrationInterface, QueryRunner } from 'typeorm'
+
+export class FixDocumentStoreFileChunkLongText1765000000000 implements MigrationInterface {
+    public async up(queryRunner: QueryRunner): Promise<void> {
+        await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`pageContent\` LONGTEXT NOT NULL;`)
+        await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`metadata\` LONGTEXT NULL;`)
+    }
+
+    public async down(queryRunner: QueryRunner): Promise<void> {
+        // WARNING: Reverting to TEXT may cause data loss if content exceeds the 64KB limit.
+        await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`pageContent\` TEXT NOT NULL;`)
+        await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`metadata\` TEXT NULL;`)
+    }
+}
@@ -40,6 +40,7 @@ import { AddTextToSpeechToChatFlow1754986457485 } from './1754986457485-AddTextT
 import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType'
 import { AddTextToSpeechToChatFlow1759419231100 } from './1759419231100-AddTextToSpeechToChatFlow'
 import { AddChatFlowNameIndex1759424809984 } from './1759424809984-AddChatFlowNameIndex'
+import { FixDocumentStoreFileChunkLongText1765000000000 } from './1765000000000-FixDocumentStoreFileChunkLongText'

 import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/mariadb/1720230151482-AddAuthTables'
 import { AddWorkspace1725437498242 } from '../../../enterprise/database/migrations/mariadb/1725437498242-AddWorkspace'

@@ -106,5 +107,6 @@ export const mariadbMigrations = [
     AddTextToSpeechToChatFlow1754986457485,
     ModifyChatflowType1755066758601,
     AddTextToSpeechToChatFlow1759419231100,
-    AddChatFlowNameIndex1759424809984
+    AddChatFlowNameIndex1759424809984,
+    FixDocumentStoreFileChunkLongText1765000000000
 ]
@@ -0,0 +1,14 @@
+import { MigrationInterface, QueryRunner } from 'typeorm'
+
+export class FixDocumentStoreFileChunkLongText1765000000000 implements MigrationInterface {
+    public async up(queryRunner: QueryRunner): Promise<void> {
+        await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`pageContent\` LONGTEXT NOT NULL;`)
+        await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`metadata\` LONGTEXT NULL;`)
+    }
+
+    public async down(queryRunner: QueryRunner): Promise<void> {
+        // WARNING: Reverting to TEXT may cause data loss if content exceeds the 64KB limit.
+        await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`pageContent\` TEXT NOT NULL;`)
+        await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`metadata\` TEXT NULL;`)
+    }
+}
@@ -41,6 +41,7 @@ import { AddTextToSpeechToChatFlow1754986468397 } from './1754986468397-AddTextT
 import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType'
 import { AddTextToSpeechToChatFlow1759419216034 } from './1759419216034-AddTextToSpeechToChatFlow'
 import { AddChatFlowNameIndex1759424828558 } from './1759424828558-AddChatFlowNameIndex'
+import { FixDocumentStoreFileChunkLongText1765000000000 } from './1765000000000-FixDocumentStoreFileChunkLongText'

 import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/mysql/1720230151482-AddAuthTables'
 import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/mysql/1720230151484-AddWorkspace'

@@ -108,5 +109,6 @@ export const mysqlMigrations = [
     AddTextToSpeechToChatFlow1754986468397,
     ModifyChatflowType1755066758601,
     AddTextToSpeechToChatFlow1759419216034,
-    AddChatFlowNameIndex1759424828558
+    AddChatFlowNameIndex1759424828558,
+    FixDocumentStoreFileChunkLongText1765000000000
 ]
@@ -391,7 +391,7 @@ const deleteDocumentStoreFileChunk = async (storeId: string, docId: string, chun
     }
 }

-const deleteVectorStoreFromStore = async (storeId: string, workspaceId: string) => {
+const deleteVectorStoreFromStore = async (storeId: string, workspaceId: string, docId?: string) => {
     try {
         const appServer = getRunningExpressApp()
         const componentNodes = appServer.nodesPool.componentNodes

@@ -461,7 +461,7 @@ const deleteVectorStoreFromStore = async (storeId: string, workspaceId: string)

         // Call the delete method of the vector store
         if (vectorStoreObj.vectorStoreMethods.delete) {
-            await vectorStoreObj.vectorStoreMethods.delete(vStoreNodeData, idsToDelete, options)
+            await vectorStoreObj.vectorStoreMethods.delete(vStoreNodeData, idsToDelete, { ...options, docId })
         }
     } catch (error) {
         throw new InternalFlowiseError(
@@ -1157,6 +1157,18 @@ const updateVectorStoreConfigOnly = async (data: ICommonObject, workspaceId: str
         )
     }
 }
+/**
+ * Saves vector store configuration to the document store entity.
+ * Handles embedding, vector store, and record manager configurations.
+ *
+ * @example
+ * // Strict mode: Only save what's provided, clear the rest
+ * await saveVectorStoreConfig(ds, { storeId, embeddingName, embeddingConfig }, true, wsId)
+ *
+ * @example
+ * // Lenient mode: Reuse existing configs if not provided
+ * await saveVectorStoreConfig(ds, { storeId, vectorStoreName, vectorStoreConfig }, false, wsId)
+ */
 const saveVectorStoreConfig = async (appDataSource: DataSource, data: ICommonObject, isStrictSave = true, workspaceId: string) => {
     try {
         const entity = await appDataSource.getRepository(DocumentStore).findOneBy({

@@ -1221,6 +1233,15 @@ const saveVectorStoreConfig = async (appDataSource: DataSource, data: ICommonObj
     }
 }

+/**
+ * Inserts documents from document store into the configured vector store.
+ *
+ * Process:
+ * 1. Saves vector store configuration (embedding, vector store, record manager)
+ * 2. Sets document store status to UPSERTING
+ * 3. Performs the actual vector store upsert operation
+ * 4. Updates status to UPSERTED upon completion
+ */
 export const insertIntoVectorStore = async ({
     appDataSource,
     componentNodes,
@@ -1231,19 +1252,16 @@ export const insertIntoVectorStore = async ({
     workspaceId
 }: IExecuteVectorStoreInsert) => {
     try {
+        // Step 1: Save configuration based on isStrictSave mode
+        const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave, workspaceId)
+
+        // Step 2: Mark as UPSERTING before starting the operation
+        entity.status = DocumentStoreStatus.UPSERTING
+        await appDataSource.getRepository(DocumentStore).save(entity)
+
-        const indexResult = await _insertIntoVectorStoreWorkerThread(
-            appDataSource,
-            componentNodes,
-            telemetry,
-            data,
-            isStrictSave,
-            orgId,
-            workspaceId
-        )
+        // Step 3: Perform the actual vector store upsert
+        // Note: Configuration already saved above, worker thread just retrieves and uses it
+        const indexResult = await _insertIntoVectorStoreWorkerThread(appDataSource, componentNodes, telemetry, data, orgId, workspaceId)
         return indexResult
     } catch (error) {
         throw new InternalFlowiseError(

@@ -1308,12 +1326,18 @@ const _insertIntoVectorStoreWorkerThread = async (
     componentNodes: IComponentNodes,
     telemetry: Telemetry,
     data: ICommonObject,
-    isStrictSave = true,
     orgId: string,
     workspaceId: string
 ) => {
     try {
-        const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave, workspaceId)
+        // Configuration already saved by insertIntoVectorStore, just retrieve the entity
+        const entity = await appDataSource.getRepository(DocumentStore).findOneBy({
+            id: data.storeId,
+            workspaceId: workspaceId
+        })
+        if (!entity) {
+            throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${data.storeId} not found`)
+        }
         let upsertHistory: Record<string, any> = {}
         const chatflowid = data.storeId // fake chatflowid because this is not tied to any chatflow

@@ -1350,7 +1374,10 @@ const _insertIntoVectorStoreWorkerThread = async (
     const docs: Document[] = chunks.map((chunk: DocumentStoreFileChunk) => {
         return new Document({
             pageContent: chunk.pageContent,
-            metadata: JSON.parse(chunk.metadata)
+            metadata: {
+                ...JSON.parse(chunk.metadata),
+                docId: chunk.docId
+            }
         })
     })
     vStoreNodeData.inputs.document = docs

@@ -1911,6 +1938,8 @@ const upsertDocStore = async (
         recordManagerConfig
     }

+    // Use isStrictSave: false to preserve existing configurations during upsert
+    // This allows the operation to reuse existing embedding/vector store/record manager configs
     const res = await insertIntoVectorStore({
         appDataSource,
         componentNodes,
@@ -2122,7 +2122,62 @@ export const executeAgentFlow = async ({

     // check if last agentFlowExecutedData.data.output contains the key "content"
     const lastNodeOutput = agentFlowExecutedData[agentFlowExecutedData.length - 1].data?.output as ICommonObject | undefined
-    const content = (lastNodeOutput?.content as string) ?? ' '
+    let content = (lastNodeOutput?.content as string) ?? ' '
+
+    /* Check for post-processing settings */
+    let chatflowConfig: ICommonObject = {}
+    try {
+        if (chatflow.chatbotConfig) {
+            chatflowConfig = typeof chatflow.chatbotConfig === 'string' ? JSON.parse(chatflow.chatbotConfig) : chatflow.chatbotConfig
+        }
+    } catch (e) {
+        logger.error('[server]: Error parsing chatflow config:', e)
+    }
+
+    if (chatflowConfig?.postProcessing?.enabled === true && content) {
+        try {
+            const postProcessingFunction = JSON.parse(chatflowConfig?.postProcessing?.customFunction)
+            const nodeInstanceFilePath = componentNodes['customFunctionAgentflow'].filePath as string
+            const nodeModule = await import(nodeInstanceFilePath)
+            //set the outputs.output to EndingNode to prevent json escaping of content...
+            const nodeData = {
+                inputs: { customFunctionJavascriptFunction: postProcessingFunction }
+            }
+            const runtimeChatHistory = agentflowRuntime.chatHistory || []
+            const chatHistory = [...pastChatHistory, ...runtimeChatHistory]
+            const options: ICommonObject = {
+                chatflowid: chatflow.id,
+                sessionId,
+                chatId,
+                input: question || form,
+                postProcessing: {
+                    rawOutput: content,
+                    chatHistory: cloneDeep(chatHistory),
+                    sourceDocuments: lastNodeOutput?.sourceDocuments ? cloneDeep(lastNodeOutput.sourceDocuments) : undefined,
+                    usedTools: lastNodeOutput?.usedTools ? cloneDeep(lastNodeOutput.usedTools) : undefined,
+                    artifacts: lastNodeOutput?.artifacts ? cloneDeep(lastNodeOutput.artifacts) : undefined,
+                    fileAnnotations: lastNodeOutput?.fileAnnotations ? cloneDeep(lastNodeOutput.fileAnnotations) : undefined
+                },
+                appDataSource,
+                databaseEntities,
+                workspaceId,
+                orgId,
+                logger
+            }
+            const customFuncNodeInstance = new nodeModule.nodeClass()
+            const customFunctionResponse = await customFuncNodeInstance.run(nodeData, question || form, options)
+            const moderatedResponse = customFunctionResponse.output.content
+            if (typeof moderatedResponse === 'string') {
+                content = moderatedResponse
+            } else if (typeof moderatedResponse === 'object') {
+                content = '```json\n' + JSON.stringify(moderatedResponse, null, 2) + '\n```'
+            } else {
+                content = moderatedResponse
+            }
+        } catch (e) {
+            logger.error('[server]: Post Processing Error:', e)
+        }
+    }

     // remove credentialId from agentFlowExecutedData
     agentFlowExecutedData = agentFlowExecutedData.map((data) => _removeCredentialId(data))
@@ -2,7 +2,7 @@ import { Request } from 'express'
 import * as path from 'path'
 import { DataSource } from 'typeorm'
 import { v4 as uuidv4 } from 'uuid'
-import { omit } from 'lodash'
+import { omit, cloneDeep } from 'lodash'
 import {
     IFileUpload,
     convertSpeechToText,

@@ -817,7 +817,14 @@ export const executeFlow = async ({
             sessionId,
             chatId,
             input: question,
-            rawOutput: resultText,
+            postProcessing: {
+                rawOutput: resultText,
+                chatHistory: cloneDeep(chatHistory),
+                sourceDocuments: result?.sourceDocuments ? cloneDeep(result.sourceDocuments) : undefined,
+                usedTools: result?.usedTools ? cloneDeep(result.usedTools) : undefined,
+                artifacts: result?.artifacts ? cloneDeep(result.artifacts) : undefined,
+                fileAnnotations: result?.fileAnnotations ? cloneDeep(result.fileAnnotations) : undefined
+            },
             appDataSource,
             databaseEntities,
             workspaceId,
@@ -70,7 +70,7 @@ export const checkUsageLimit = async (
     if (limit === -1) return

     if (currentUsage > limit) {
-        throw new InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, `Limit exceeded: ${type}`)
+        throw new InternalFlowiseError(StatusCodes.PAYMENT_REQUIRED, `Limit exceeded: ${type}`)
     }
 }

@@ -135,7 +135,7 @@ export const checkPredictions = async (orgId: string, subscriptionId: string, us
     if (predictionsLimit === -1) return

     if (currentPredictions >= predictionsLimit) {
-        throw new InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, 'Predictions limit exceeded')
+        throw new InternalFlowiseError(StatusCodes.PAYMENT_REQUIRED, 'Predictions limit exceeded')
     }

     return {

@@ -161,7 +161,7 @@ export const checkStorage = async (orgId: string, subscriptionId: string, usageC
     if (storageLimit === -1) return

     if (currentStorageUsage >= storageLimit) {
-        throw new InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, 'Storage limit exceeded')
+        throw new InternalFlowiseError(StatusCodes.PAYMENT_REQUIRED, 'Storage limit exceeded')
     }

     return {
@@ -1,6 +1,6 @@
 {
     "name": "flowise-ui",
-    "version": "3.0.10",
+    "version": "3.0.11",
     "license": "SEE LICENSE IN LICENSE.md",
     "homepage": "https://flowiseai.com",
     "author": {
@@ -22,7 +22,10 @@ const refreshLoader = (storeId) => client.post(`/document-store/refresh/${storeI
 const insertIntoVectorStore = (body) => client.post(`/document-store/vectorstore/insert`, body)
 const saveVectorStoreConfig = (body) => client.post(`/document-store/vectorstore/save`, body)
 const updateVectorStoreConfig = (body) => client.post(`/document-store/vectorstore/update`, body)
-const deleteVectorStoreDataFromStore = (storeId) => client.delete(`/document-store/vectorstore/${storeId}`)
+const deleteVectorStoreDataFromStore = (storeId, docId) => {
+    const url = docId ? `/document-store/vectorstore/${storeId}?docId=${docId}` : `/document-store/vectorstore/${storeId}`
+    return client.delete(url)
+}
 const queryVectorStore = (body) => client.post(`/document-store/vectorstore/query`, body)
 const getVectorStoreProviders = () => client.get('/document-store/components/vectorstore')
 const getEmbeddingProviders = () => client.get('/document-store/components/embeddings')
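Existing call sites keep working unchanged when `docId` is omitted; the module import name below is assumed for illustration:

```typescript
// Assuming this API module is imported as documentStoreApi:
await documentStoreApi.deleteVectorStoreDataFromStore(storeId)        // delete everything for the store
await documentStoreApi.deleteVectorStoreDataFromStore(storeId, docId) // delete one document's vectors only
```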
@@ -58,7 +58,7 @@ const NavGroup = ({ item }) => {

     const renderNonPrimaryGroups = () => {
         let nonprimaryGroups = item.children.filter((child) => child.id !== 'primary')
-        // Display chilren based on permission and display
+        // Display children based on permission and display
         nonprimaryGroups = nonprimaryGroups.map((group) => {
             const children = group.children.filter((menu) => shouldDisplayMenu(menu))
             return { ...group, children }
@@ -10,6 +10,7 @@ const VerifyEmailPage = Loadable(lazy(() => import('@/views/auth/verify-email'))
 const ForgotPasswordPage = Loadable(lazy(() => import('@/views/auth/forgotPassword')))
 const ResetPasswordPage = Loadable(lazy(() => import('@/views/auth/resetPassword')))
 const UnauthorizedPage = Loadable(lazy(() => import('@/views/auth/unauthorized')))
+const RateLimitedPage = Loadable(lazy(() => import('@/views/auth/rateLimited')))
 const OrganizationSetupPage = Loadable(lazy(() => import('@/views/organization/index')))
 const LicenseExpiredPage = Loadable(lazy(() => import('@/views/auth/expired')))

@@ -45,6 +46,10 @@ const AuthRoutes = {
         path: '/unauthorized',
         element: <UnauthorizedPage />
     },
+    {
+        path: '/rate-limited',
+        element: <RateLimitedPage />
+    },
     {
         path: '/organization-setup',
         element: <OrganizationSetupPage />
@@ -10,11 +10,29 @@ const ErrorContext = createContext()

 export const ErrorProvider = ({ children }) => {
     const [error, setError] = useState(null)
+    const [authRateLimitError, setAuthRateLimitError] = useState(null)
     const navigate = useNavigate()

     const handleError = async (err) => {
         console.error(err)
-        if (err?.response?.status === 403) {
+        if (err?.response?.status === 429 && err?.response?.data?.type === 'authentication_rate_limit') {
+            setAuthRateLimitError("You're making a lot of requests. Please wait and try again later.")
+        } else if (err?.response?.status === 429 && err?.response?.data?.type !== 'authentication_rate_limit') {
+            const retryAfterHeader = err?.response?.headers?.['retry-after']
+            let retryAfter = 60 // Default in seconds
+            if (retryAfterHeader) {
+                const parsedSeconds = parseInt(retryAfterHeader, 10)
+                if (Number.isNaN(parsedSeconds)) {
+                    const retryDate = new Date(retryAfterHeader)
+                    if (!Number.isNaN(retryDate.getTime())) {
+                        retryAfter = Math.max(0, Math.ceil((retryDate.getTime() - Date.now()) / 1000))
+                    }
+                } else {
+                    retryAfter = parsedSeconds
+                }
+            }
+            navigate('/rate-limited', { state: { retryAfter } })
+        } else if (err?.response?.status === 403) {
             navigate('/unauthorized')
         } else if (err?.response?.status === 401) {
             if (ErrorMessage.INVALID_MISSING_TOKEN === err?.response?.data?.message) {

@@ -44,7 +62,9 @@ export const ErrorProvider = ({ children }) => {
             value={{
                 error,
                 setError,
-                handleError
+                handleError,
+                authRateLimitError,
+                setAuthRateLimitError
             }}
         >
             {children}

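As a side note, the Retry-After header may carry either delta-seconds or an HTTP-date (RFC 9110), which is exactly what the branch above handles. A standalone sketch of the same fallback logic, runnable in isolation:

// Standalone sketch mirroring the handler's Retry-After parsing; not part of the diff.
const parseRetryAfter = (headerValue, fallbackSeconds = 60) => {
    if (!headerValue) return fallbackSeconds
    const parsedSeconds = parseInt(headerValue, 10)
    if (!Number.isNaN(parsedSeconds)) return parsedSeconds
    const retryDate = new Date(headerValue)
    if (!Number.isNaN(retryDate.getTime())) {
        return Math.max(0, Math.ceil((retryDate.getTime() - Date.now()) / 1000))
    }
    return fallbackSeconds
}

console.log(parseRetryAfter('120')) // 120 (delta-seconds form)
console.log(parseRetryAfter('Wed, 21 Oct 2026 07:28:00 GMT')) // seconds until that date
console.log(parseRetryAfter(undefined)) // 60 (default)
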
@@ -74,7 +74,7 @@ const StyledMenu = styled((props) => (
     }
 }))

-export default function FlowListMenu({ chatflow, isAgentCanvas, isAgentflowV2, setError, updateFlowsApi }) {
+export default function FlowListMenu({ chatflow, isAgentCanvas, isAgentflowV2, setError, updateFlowsApi, currentPage, pageLimit }) {
     const { confirm } = useConfirm()
     const dispatch = useDispatch()
     const updateChatflowApi = useApi(chatflowsApi.updateChatflow)

@@ -166,10 +166,16 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, isAgentflowV2, s
         }
         try {
             await updateChatflowApi.request(chatflow.id, updateBody)
+            const params = {
+                page: currentPage,
+                limit: pageLimit
+            }
             if (isAgentCanvas && isAgentflowV2) {
-                await updateFlowsApi.request('AGENTFLOW')
+                await updateFlowsApi.request('AGENTFLOW', params)
+            } else if (isAgentCanvas) {
+                await updateFlowsApi.request('MULTIAGENT', params)
             } else {
-                await updateFlowsApi.request(isAgentCanvas ? 'MULTIAGENT' : undefined)
+                await updateFlowsApi.request(params)
             }
         } catch (error) {
             if (setError) setError(error)

@@ -209,7 +215,15 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, isAgentflowV2, s
         }
         try {
             await updateChatflowApi.request(chatflow.id, updateBody)
-            await updateFlowsApi.request(isAgentCanvas ? 'AGENTFLOW' : undefined)
+            const params = {
+                page: currentPage,
+                limit: pageLimit
+            }
+            if (isAgentCanvas) {
+                await updateFlowsApi.request('AGENTFLOW', params)
+            } else {
+                await updateFlowsApi.request(params)
+            }
         } catch (error) {
             if (setError) setError(error)
             enqueueSnackbar({

@@ -241,10 +255,16 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, isAgentflowV2, s
         if (isConfirmed) {
             try {
                 await chatflowsApi.deleteChatflow(chatflow.id)
+                const params = {
+                    page: currentPage,
+                    limit: pageLimit
+                }
                 if (isAgentCanvas && isAgentflowV2) {
-                    await updateFlowsApi.request('AGENTFLOW')
+                    await updateFlowsApi.request('AGENTFLOW', params)
+                } else if (isAgentCanvas) {
+                    await updateFlowsApi.request('MULTIAGENT', params)
                 } else {
-                    await updateFlowsApi.request(isAgentCanvas ? 'MULTIAGENT' : undefined)
+                    await updateFlowsApi.request(params)
                 }
             } catch (error) {
                 if (setError) setError(error)

@@ -454,5 +474,7 @@ FlowListMenu.propTypes = {
     isAgentCanvas: PropTypes.bool,
     isAgentflowV2: PropTypes.bool,
     setError: PropTypes.func,
-    updateFlowsApi: PropTypes.object
+    updateFlowsApi: PropTypes.object,
+    currentPage: PropTypes.number,
+    pageLimit: PropTypes.number
 }

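The refetch now threads the current pagination through so the list stays on the same page after a rename or delete. The two call shapes used above, side by side for clarity:

// Agent canvases pass a flow type plus pagination; the chatflow list passes pagination alone.
const params = { page: currentPage, limit: pageLimit }
await updateFlowsApi.request('AGENTFLOW', params) // agentflow list
await updateFlowsApi.request(params) // chatflow list
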
@@ -53,8 +53,7 @@ const CHATFLOW_CONFIGURATION_TABS = [
     },
     {
         label: 'Post Processing',
-        id: 'postProcessing',
-        hideInAgentFlow: true
+        id: 'postProcessing'
     }
 ]

@@ -16,11 +16,11 @@ import { useEditor, EditorContent } from '@tiptap/react'
 import Placeholder from '@tiptap/extension-placeholder'
 import { mergeAttributes } from '@tiptap/core'
 import StarterKit from '@tiptap/starter-kit'
-import Mention from '@tiptap/extension-mention'
 import CodeBlockLowlight from '@tiptap/extension-code-block-lowlight'
 import { common, createLowlight } from 'lowlight'
 import { suggestionOptions } from '@/ui-component/input/suggestionOption'
 import { getAvailableNodesForVariable } from '@/utils/genericHelper'
+import { CustomMention } from '@/utils/customMention'

 const lowlight = createLowlight(common)

@@ -78,7 +78,7 @@ const extensions = (availableNodesForVariable, availableState, acceptNodeOutputA
     StarterKit.configure({
         codeBlock: false
     }),
-    Mention.configure({
+    CustomMention.configure({
         HTMLAttributes: {
             class: 'variable'
         },

@@ -4,8 +4,25 @@ import PropTypes from 'prop-types'
 import { useSelector } from 'react-redux'

 // material-ui
-import { IconButton, Button, Box, Typography } from '@mui/material'
-import { IconArrowsMaximize, IconBulb, IconX } from '@tabler/icons-react'
+import {
+    IconButton,
+    Button,
+    Box,
+    Typography,
+    TableContainer,
+    Table,
+    TableHead,
+    TableBody,
+    TableRow,
+    TableCell,
+    Paper,
+    Accordion,
+    AccordionSummary,
+    AccordionDetails,
+    Card
+} from '@mui/material'
+import { IconArrowsMaximize, IconX } from '@tabler/icons-react'
+import ExpandMoreIcon from '@mui/icons-material/ExpandMore'
 import { useTheme } from '@mui/material/styles'

 // Project import

@@ -21,7 +38,11 @@ import useNotifier from '@/utils/useNotifier'
 // API
 import chatflowsApi from '@/api/chatflows'

-const sampleFunction = `return $flow.rawOutput + " This is a post processed response!";`
+const sampleFunction = `// Access chat history as a string
+const chatHistory = JSON.stringify($flow.chatHistory, null, 2);
+
+// Return a modified response
+return $flow.rawOutput + " This is a post processed response!";`

 const PostProcessing = ({ dialogProps }) => {
     const dispatch = useDispatch()

@@ -175,31 +196,105 @@ const PostProcessing = ({ dialogProps }) => {
                         />
                     </div>
                 </Box>
-                <div
-                    style={{
-                        display: 'flex',
-                        flexDirection: 'column',
-                        borderRadius: 10,
-                        background: '#d8f3dc',
-                        padding: 10,
-                        marginTop: 10
-                    }}
-                >
-                    <div
-                        style={{
-                            display: 'flex',
-                            flexDirection: 'row',
-                            alignItems: 'center',
-                            paddingTop: 10
-                        }}
-                    >
-                        <IconBulb size={30} color='#2d6a4f' />
-                        <span style={{ color: '#2d6a4f', marginLeft: 10, fontWeight: 500 }}>
-                            The following variables are available to use in the custom function:{' '}
-                            <pre>$flow.rawOutput, $flow.input, $flow.chatflowId, $flow.sessionId, $flow.chatId</pre>
-                        </span>
-                    </div>
-                </div>
+                <Card sx={{ borderColor: theme.palette.primary[200] + 75, mt: 2, mb: 2 }} variant='outlined'>
+                    <Accordion
+                        disableGutters
+                        sx={{
+                            '&:before': {
+                                display: 'none'
+                            }
+                        }}
+                    >
+                        <AccordionSummary expandIcon={<ExpandMoreIcon />}>
+                            <Typography>Available Variables</Typography>
+                        </AccordionSummary>
+                        <AccordionDetails sx={{ p: 0 }}>
+                            <TableContainer component={Paper}>
+                                <Table aria-label='available variables table'>
+                                    <TableHead>
+                                        <TableRow>
+                                            <TableCell sx={{ width: '30%' }}>Variable</TableCell>
+                                            <TableCell sx={{ width: '15%' }}>Type</TableCell>
+                                            <TableCell sx={{ width: '55%' }}>Description</TableCell>
+                                        </TableRow>
+                                    </TableHead>
+                                    <TableBody>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.rawOutput</code>
+                                            </TableCell>
+                                            <TableCell>string</TableCell>
+                                            <TableCell>The raw output response from the flow</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.input</code>
+                                            </TableCell>
+                                            <TableCell>string</TableCell>
+                                            <TableCell>The user input message</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.chatHistory</code>
+                                            </TableCell>
+                                            <TableCell>array</TableCell>
+                                            <TableCell>Array of previous messages in the conversation</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.chatflowId</code>
+                                            </TableCell>
+                                            <TableCell>string</TableCell>
+                                            <TableCell>Unique identifier for the chatflow</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.sessionId</code>
+                                            </TableCell>
+                                            <TableCell>string</TableCell>
+                                            <TableCell>Current session identifier</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.chatId</code>
+                                            </TableCell>
+                                            <TableCell>string</TableCell>
+                                            <TableCell>Current chat identifier</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.sourceDocuments</code>
+                                            </TableCell>
+                                            <TableCell>array</TableCell>
+                                            <TableCell>Source documents used in retrieval (if applicable)</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.usedTools</code>
+                                            </TableCell>
+                                            <TableCell>array</TableCell>
+                                            <TableCell>List of tools used during execution</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell>
+                                                <code>$flow.artifacts</code>
+                                            </TableCell>
+                                            <TableCell>array</TableCell>
+                                            <TableCell>List of artifacts generated during execution</TableCell>
+                                        </TableRow>
+                                        <TableRow>
+                                            <TableCell sx={{ borderBottom: 'none' }}>
+                                                <code>$flow.fileAnnotations</code>
+                                            </TableCell>
+                                            <TableCell sx={{ borderBottom: 'none' }}>array</TableCell>
+                                            <TableCell sx={{ borderBottom: 'none' }}>File annotations associated with the response</TableCell>
+                                        </TableRow>
+                                    </TableBody>
+                                </Table>
+                            </TableContainer>
+                        </AccordionDetails>
+                    </Accordion>
+                </Card>
                 <StyledButton
                     style={{ marginBottom: 10, marginTop: 10 }}
                     variant='contained'

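To make the contract concrete, a hypothetical post-processing function body that uses only the $flow variables documented in the table above (the runtime values are supplied by Flowise when the function executes; the wording of the output is invented):

// Hypothetical user-supplied post-processing body; $flow is injected at runtime.
const sources = ($flow.sourceDocuments ?? []).length;
const turns = ($flow.chatHistory ?? []).length;
return `${$flow.rawOutput}\n\n(Session ${$flow.sessionId}: ${turns} prior message(s), ${sources} source document(s).)`;
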
@@ -7,11 +7,11 @@ import { mergeAttributes } from '@tiptap/core'
 import StarterKit from '@tiptap/starter-kit'
 import { styled } from '@mui/material/styles'
 import { Box } from '@mui/material'
-import Mention from '@tiptap/extension-mention'
 import CodeBlockLowlight from '@tiptap/extension-code-block-lowlight'
 import { common, createLowlight } from 'lowlight'
 import { suggestionOptions } from './suggestionOption'
 import { getAvailableNodesForVariable } from '@/utils/genericHelper'
+import { CustomMention } from '@/utils/customMention'

 const lowlight = createLowlight(common)

@@ -20,7 +20,7 @@ const extensions = (availableNodesForVariable, availableState, acceptNodeOutputA
     StarterKit.configure({
         codeBlock: false
     }),
-    Mention.configure({
+    CustomMention.configure({
         HTMLAttributes: {
             class: 'variable'
         },

@@ -112,7 +112,7 @@ export const suggestionOptions = (
         category: 'Node Outputs'
     })

-    const structuredOutputs = nodeData?.inputs?.llmStructuredOutput ?? []
+    const structuredOutputs = nodeData?.inputs?.llmStructuredOutput ?? nodeData?.inputs?.agentStructuredOutput ?? []
    if (structuredOutputs && structuredOutputs.length > 0) {
        structuredOutputs.forEach((item) => {
            defaultItems.unshift({

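One subtlety of the nullish-coalescing chain above: ?? falls through only on null/undefined, so an empty array from llmStructuredOutput still wins over agentStructuredOutput. A quick standalone check:

// Illustrative: ?? falls through on null/undefined only, not on [].
const pick = (inputs) => inputs?.llmStructuredOutput ?? inputs?.agentStructuredOutput ?? []
console.log(pick({ agentStructuredOutput: [{ key: 'answer' }] })) // agent outputs used
console.log(pick({ llmStructuredOutput: [] })) // [] (empty llm output still wins)
console.log(pick(undefined)) // []
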
@@ -59,7 +59,9 @@ export const FlowListTable = ({
     updateFlowsApi,
     setError,
     isAgentCanvas,
-    isAgentflowV2
+    isAgentflowV2,
+    currentPage,
+    pageLimit
 }) => {
     const { hasPermission } = useAuth()
     const isActionsAvailable = isAgentCanvas

@@ -331,6 +333,8 @@ export const FlowListTable = ({
                                                 chatflow={row}
                                                 setError={setError}
                                                 updateFlowsApi={updateFlowsApi}
+                                                currentPage={currentPage}
+                                                pageLimit={pageLimit}
                                             />
                                         </Stack>
                                     </StyledTableCell>

@@ -355,5 +359,7 @@ FlowListTable.propTypes = {
     updateFlowsApi: PropTypes.object,
     setError: PropTypes.func,
     isAgentCanvas: PropTypes.bool,
-    isAgentflowV2: PropTypes.bool
+    isAgentflowV2: PropTypes.bool,
+    currentPage: PropTypes.number,
+    pageLimit: PropTypes.number
 }

@@ -0,0 +1,26 @@
+import Mention from '@tiptap/extension-mention'
+import { PasteRule } from '@tiptap/core'
+
+export const CustomMention = Mention.extend({
+    renderText({ node }) {
+        return `{{${node.attrs.label ?? node.attrs.id}}}`
+    },
+    addPasteRules() {
+        return [
+            new PasteRule({
+                find: /\{\{([^{}]+)\}\}/g,
+                handler: ({ match, chain, range }) => {
+                    const label = match[1].trim()
+                    if (label) {
+                        chain()
+                            .deleteRange(range)
+                            .insertContentAt(range.from, {
+                                type: this.name,
+                                attrs: { id: label, label: label }
+                            })
+                    }
+                }
+            })
+        ]
+    }
+})

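A quick standalone check of what the paste rule's pattern captures (not part of the diff): it matches brace-free text between {{ and }}, so nested or empty braces are ignored.

// The pattern captures brace-free text between {{ and }}; empty braces don't match.
const find = /\{\{([^{}]+)\}\}/g
const pasted = 'Use {{ $flow.input }} or {{question}}, but {{}} is ignored'
for (const match of pasted.matchAll(find)) {
    console.log(match[1].trim()) // "$flow.input", then "question"
}
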
@@ -325,6 +325,8 @@ const Agentflows = () => {
                             filterFunction={filterFlows}
                             updateFlowsApi={getAllAgentflows}
                             setError={setError}
+                            currentPage={currentPage}
+                            pageLimit={pageLimit}
                         />
                     )}
                     {/* Pagination and Page Size Controls */}

@@ -150,6 +150,8 @@ const AgentFlowNode = ({ data }) => {
                 return <IconWorldWww size={14} color={'white'} />
             case 'googleSearch':
                 return <IconBrandGoogle size={14} color={'white'} />
+            case 'codeExecution':
+                return <IconCode size={14} color={'white'} />
             default:
                 return null
         }

@@ -16,6 +16,7 @@ import accountApi from '@/api/account.api'
 // Hooks
 import useApi from '@/hooks/useApi'
 import { useConfig } from '@/store/context/ConfigContext'
+import { useError } from '@/store/context/ErrorContext'

 // utils
 import useNotifier from '@/utils/useNotifier'

@@ -41,10 +42,13 @@ const ForgotPasswordPage = () => {
     const [isLoading, setLoading] = useState(false)
     const [responseMsg, setResponseMsg] = useState(undefined)

+    const { authRateLimitError, setAuthRateLimitError } = useError()
+
     const forgotPasswordApi = useApi(accountApi.forgotPassword)

     const sendResetRequest = async (event) => {
         event.preventDefault()
+        setAuthRateLimitError(null)
         const body = {
             user: {
                 email: usernameVal

@@ -54,6 +58,11 @@ const ForgotPasswordPage = () => {
         await forgotPasswordApi.request(body)
     }

+    useEffect(() => {
+        setAuthRateLimitError(null)
+        // eslint-disable-next-line react-hooks/exhaustive-deps
+    }, [setAuthRateLimitError])
+
     useEffect(() => {
         if (forgotPasswordApi.error) {
             const errMessage =

@@ -89,6 +98,11 @@ const ForgotPasswordPage = () => {
                             {responseMsg.msg}
                         </Alert>
                     )}
+                    {authRateLimitError && (
+                        <Alert icon={<IconExclamationCircle />} variant='filled' severity='error'>
+                            {authRateLimitError}
+                        </Alert>
+                    )}
                     {responseMsg && responseMsg?.type !== 'error' && (
                         <Alert icon={<IconCircleCheck />} variant='filled' severity='success'>
                             {responseMsg.msg}

@@ -0,0 +1,51 @@
+import { Box, Button, Stack, Typography } from '@mui/material'
+import { Link, useLocation } from 'react-router-dom'
+import unauthorizedSVG from '@/assets/images/unauthorized.svg'
+import MainCard from '@/ui-component/cards/MainCard'
+
+// ==============================|| RateLimitedPage ||============================== //
+
+const RateLimitedPage = () => {
+    const location = useLocation()
+
+    const retryAfter = location.state?.retryAfter || 60
+
+    return (
+        <MainCard>
+            <Box
+                sx={{
+                    display: 'flex',
+                    justifyContent: 'center',
+                    alignItems: 'center',
+                    height: 'calc(100vh - 210px)'
+                }}
+            >
+                <Stack
+                    sx={{
+                        alignItems: 'center',
+                        justifyContent: 'center',
+                        maxWidth: '500px'
+                    }}
+                    flexDirection='column'
+                >
+                    <Box sx={{ p: 2, height: 'auto' }}>
+                        <img style={{ objectFit: 'cover', height: '20vh', width: 'auto' }} src={unauthorizedSVG} alt='rateLimitedSVG' />
+                    </Box>
+                    <Typography sx={{ mb: 2 }} variant='h4' component='div' fontWeight='bold'>
+                        429 Too Many Requests
+                    </Typography>
+                    <Typography variant='body1' component='div' sx={{ mb: 2, textAlign: 'center' }}>
+                        {`You have made too many requests in a short period of time. Please wait ${retryAfter}s before trying again.`}
+                    </Typography>
+                    <Link to='/'>
+                        <Button variant='contained' color='primary'>
+                            Back to Home
+                        </Button>
+                    </Link>
+                </Stack>
+            </Box>
+        </MainCard>
+    )
+}
+
+export default RateLimitedPage

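For reference, the page is reached via client-side navigation with router state, as wired up in the ErrorContext change above; opened directly, it falls back to 60 seconds:

// From ErrorContext: retryAfter travels via react-router state.
navigate('/rate-limited', { state: { retryAfter } })
// In the page: location.state?.retryAfter || 60 supplies the fallback.
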
@@ -18,6 +18,7 @@ import ssoApi from '@/api/sso'
 // Hooks
 import useApi from '@/hooks/useApi'
 import { useConfig } from '@/store/context/ConfigContext'
+import { useError } from '@/store/context/ErrorContext'

 // utils
 import useNotifier from '@/utils/useNotifier'

@@ -111,7 +112,9 @@ const RegisterPage = () => {

     const [loading, setLoading] = useState(false)
     const [authError, setAuthError] = useState('')
-    const [successMsg, setSuccessMsg] = useState(undefined)
+    const [successMsg, setSuccessMsg] = useState('')
+
+    const { authRateLimitError, setAuthRateLimitError } = useError()

     const registerApi = useApi(accountApi.registerAccount)
     const ssoLoginApi = useApi(ssoApi.ssoLogin)

@@ -120,6 +123,7 @@ const RegisterPage = () => {

     const register = async (event) => {
         event.preventDefault()
+        setAuthRateLimitError(null)
         if (isEnterpriseLicensed) {
             const result = RegisterEnterpriseUserSchema.safeParse({
                 username,

@@ -192,6 +196,7 @@ const RegisterPage = () => {
     }, [registerApi.error])

     useEffect(() => {
+        setAuthRateLimitError(null)
         if (!isOpenSource) {
             getDefaultProvidersApi.request()
         }

@@ -274,6 +279,11 @@ const RegisterPage = () => {
                         )}
                     </Alert>
                 )}
+                {authRateLimitError && (
+                    <Alert icon={<IconExclamationCircle />} variant='filled' severity='error'>
+                        {authRateLimitError}
+                    </Alert>
+                )}
                 {successMsg && (
                     <Alert icon={<IconCircleCheck />} variant='filled' severity='success'>
                         {successMsg}

@@ -1,4 +1,4 @@
-import { useState } from 'react'
+import { useEffect, useState } from 'react'
 import { useDispatch } from 'react-redux'
 import { Link, useNavigate, useSearchParams } from 'react-router-dom'

@@ -19,6 +19,9 @@ import accountApi from '@/api/account.api'
 import useNotifier from '@/utils/useNotifier'
 import { validatePassword } from '@/utils/validation'

+// Hooks
+import { useError } from '@/store/context/ErrorContext'
+
 // Icons
 import { IconExclamationCircle, IconX } from '@tabler/icons-react'

@@ -70,6 +73,8 @@ const ResetPasswordPage = () => {
     const [loading, setLoading] = useState(false)
     const [authErrors, setAuthErrors] = useState([])

+    const { authRateLimitError, setAuthRateLimitError } = useError()
+
     const goLogin = () => {
         navigate('/signin', { replace: true })
     }

@@ -78,6 +83,7 @@ const ResetPasswordPage = () => {
         event.preventDefault()
         const validationErrors = []
         setAuthErrors([])
+        setAuthRateLimitError(null)
         if (!tokenVal) {
             validationErrors.push('Token cannot be left blank!')
         }

@@ -142,6 +148,11 @@ const ResetPasswordPage = () => {
         }
     }

+    useEffect(() => {
+        setAuthRateLimitError(null)
+        // eslint-disable-next-line react-hooks/exhaustive-deps
+    }, [])
+
     return (
         <>
             <MainCard>

@@ -155,6 +166,11 @@ const ResetPasswordPage = () => {
                         </ul>
                     </Alert>
                 )}
+                {authRateLimitError && (
+                    <Alert icon={<IconExclamationCircle />} variant='filled' severity='error'>
+                        {authRateLimitError}
+                    </Alert>
+                )}
                 <Stack sx={{ gap: 1 }}>
                     <Typography variant='h1'>Reset Password</Typography>
                     <Typography variant='body2' sx={{ color: theme.palette.grey[600] }}>

@@ -14,6 +14,7 @@ import { Input } from '@/ui-component/input/Input'
 // Hooks
 import useApi from '@/hooks/useApi'
 import { useConfig } from '@/store/context/ConfigContext'
+import { useError } from '@/store/context/ErrorContext'

 // API
 import authApi from '@/api/auth'

@@ -62,6 +63,8 @@ const SignInPage = () => {
     const [showResendButton, setShowResendButton] = useState(false)
     const [successMessage, setSuccessMessage] = useState('')

+    const { authRateLimitError, setAuthRateLimitError } = useError()
+
     const loginApi = useApi(authApi.login)
     const ssoLoginApi = useApi(ssoApi.ssoLogin)
     const getDefaultProvidersApi = useApi(loginMethodApi.getDefaultLoginMethods)

@@ -71,6 +74,7 @@ const SignInPage = () => {

     const doLogin = (event) => {
         event.preventDefault()
+        setAuthRateLimitError(null)
         setLoading(true)
         const body = {
             email: usernameVal,

@@ -92,11 +96,12 @@ const SignInPage = () => {

     useEffect(() => {
         store.dispatch(logoutSuccess())
+        setAuthRateLimitError(null)
         if (!isOpenSource) {
             getDefaultProvidersApi.request()
         }
         // eslint-disable-next-line react-hooks/exhaustive-deps
-    }, [])
+    }, [setAuthRateLimitError, isOpenSource])

     useEffect(() => {
         // Parse the "user" query parameter from the URL

@@ -179,6 +184,11 @@ const SignInPage = () => {
                         {successMessage}
                     </Alert>
                 )}
+                {authRateLimitError && (
+                    <Alert icon={<IconExclamationCircle />} variant='filled' severity='error'>
+                        {authRateLimitError}
+                    </Alert>
+                )}
                 {authError && (
                     <Alert icon={<IconExclamationCircle />} variant='filled' severity='error'>
                         {authError}

@@ -208,6 +208,8 @@ const Chatflows = () => {
                             filterFunction={filterFlows}
                             updateFlowsApi={getAllChatflowsApi}
                             setError={setError}
+                            currentPage={currentPage}
+                            pageLimit={pageLimit}
                         />
                     )}
                     {/* Pagination and Page Size Controls */}

@@ -18,11 +18,15 @@ import {
     TableContainer,
     TableRow,
     TableCell,
-    Checkbox,
-    FormControlLabel,
-    DialogActions
+    DialogActions,
+    Card,
+    Stack,
+    Link
 } from '@mui/material'
 import { useTheme } from '@mui/material/styles'
 import ExpandMoreIcon from '@mui/icons-material/ExpandMore'
+import SettingsIcon from '@mui/icons-material/Settings'
+import { IconAlertTriangle } from '@tabler/icons-react'
 import { TableViewOnly } from '@/ui-component/table/Table'
 import { v4 as uuidv4 } from 'uuid'

@@ -36,12 +40,13 @@ import { initNode } from '@/utils/genericHelper'

 const DeleteDocStoreDialog = ({ show, dialogProps, onCancel, onDelete }) => {
     const portalElement = document.getElementById('portal')
+    const theme = useTheme()
     const [nodeConfigExpanded, setNodeConfigExpanded] = useState({})
-    const [removeFromVS, setRemoveFromVS] = useState(false)
     const [vsFlowData, setVSFlowData] = useState([])
     const [rmFlowData, setRMFlowData] = useState([])

-    const getSpecificNodeApi = useApi(nodesApi.getSpecificNode)
+    const getVectorStoreNodeApi = useApi(nodesApi.getSpecificNode)
+    const getRecordManagerNodeApi = useApi(nodesApi.getSpecificNode)

     const handleAccordionChange = (nodeName) => (event, isExpanded) => {
         const accordianNodes = { ...nodeConfigExpanded }

@@ -52,42 +57,37 @@ const DeleteDocStoreDialog = ({ show, dialogProps, onCancel, onDelete }) => {
     useEffect(() => {
         if (dialogProps.recordManagerConfig) {
             const nodeName = dialogProps.recordManagerConfig.name
-            if (nodeName) getSpecificNodeApi.request(nodeName)
+            if (nodeName) getRecordManagerNodeApi.request(nodeName)
         }

-        if (dialogProps.vectorStoreConfig) {
-            const nodeName = dialogProps.vectorStoreConfig.name
-            if (nodeName) getSpecificNodeApi.request(nodeName)
-        }
+        if (dialogProps.vectorStoreConfig) {
+            const nodeName = dialogProps.vectorStoreConfig.name
+            if (nodeName) getVectorStoreNodeApi.request(nodeName)
+        }

         return () => {
             setNodeConfigExpanded({})
-            setRemoveFromVS(false)
+            setVSFlowData([])
+            setRMFlowData([])
         }
         // eslint-disable-next-line react-hooks/exhaustive-deps
     }, [dialogProps])

+    // Process Vector Store node data
     useEffect(() => {
-        if (getSpecificNodeApi.data) {
-            const nodeData = cloneDeep(initNode(getSpecificNodeApi.data, uuidv4()))
-
-            let config = 'vectorStoreConfig'
-            if (nodeData.category === 'Record Manager') config = 'recordManagerConfig'
+        if (getVectorStoreNodeApi.data && dialogProps.vectorStoreConfig) {
+            const nodeData = cloneDeep(initNode(getVectorStoreNodeApi.data, uuidv4()))

             const paramValues = []

-            for (const inputName in dialogProps[config].config) {
+            for (const inputName in dialogProps.vectorStoreConfig.config) {
                 const inputParam = nodeData.inputParams.find((inp) => inp.name === inputName)

                 if (!inputParam) continue

                 if (inputParam.type === 'credential') continue

-                let paramValue = {}
-
-                const inputValue = dialogProps[config].config[inputName]
+                const inputValue = dialogProps.vectorStoreConfig.config[inputName]

                 if (!inputValue) continue

@@ -95,40 +95,71 @@ const DeleteDocStoreDialog = ({ show, dialogProps, onCancel, onDelete }) => {
                     continue
                 }

-                paramValue = {
+                paramValues.push({
                     label: inputParam?.label,
                     name: inputParam?.name,
                     type: inputParam?.type,
                     value: inputValue
-                }
-                paramValues.push(paramValue)
+                })
             }

-            if (config === 'vectorStoreConfig') {
-                setVSFlowData([
-                    {
-                        label: nodeData.label,
-                        name: nodeData.name,
-                        category: nodeData.category,
-                        id: nodeData.id,
-                        paramValues
-                    }
-                ])
-            } else if (config === 'recordManagerConfig') {
-                setRMFlowData([
-                    {
-                        label: nodeData.label,
-                        name: nodeData.name,
-                        category: nodeData.category,
-                        id: nodeData.id,
-                        paramValues
-                    }
-                ])
-            }
+            setVSFlowData([
+                {
+                    label: nodeData.label,
+                    name: nodeData.name,
+                    category: nodeData.category,
+                    id: nodeData.id,
+                    paramValues
+                }
+            ])
         }

         // eslint-disable-next-line react-hooks/exhaustive-deps
-    }, [getSpecificNodeApi.data])
+    }, [getVectorStoreNodeApi.data])
+
+    // Process Record Manager node data
+    useEffect(() => {
+        if (getRecordManagerNodeApi.data && dialogProps.recordManagerConfig) {
+            const nodeData = cloneDeep(initNode(getRecordManagerNodeApi.data, uuidv4()))
+
+            const paramValues = []
+
+            for (const inputName in dialogProps.recordManagerConfig.config) {
+                const inputParam = nodeData.inputParams.find((inp) => inp.name === inputName)
+
+                if (!inputParam) continue
+
+                if (inputParam.type === 'credential') continue
+
+                const inputValue = dialogProps.recordManagerConfig.config[inputName]
+
+                if (!inputValue) continue
+
+                if (typeof inputValue === 'string' && inputValue.startsWith('{{') && inputValue.endsWith('}}')) {
+                    continue
+                }
+
+                paramValues.push({
+                    label: inputParam?.label,
+                    name: inputParam?.name,
+                    type: inputParam?.type,
+                    value: inputValue
+                })
+            }
+
+            setRMFlowData([
+                {
+                    label: nodeData.label,
+                    name: nodeData.name,
+                    category: nodeData.category,
+                    id: nodeData.id,
+                    paramValues
+                }
+            ])
+        }
+
+        // eslint-disable-next-line react-hooks/exhaustive-deps
+    }, [getRecordManagerNodeApi.data])

     const component = show ? (
         <Dialog

@@ -142,91 +173,130 @@ const DeleteDocStoreDialog = ({ show, dialogProps, onCancel, onDelete }) => {
             <DialogTitle sx={{ fontSize: '1rem', p: 3, pb: 0 }} id='alert-dialog-title'>
                 {dialogProps.title}
             </DialogTitle>
-            <DialogContent sx={{ display: 'flex', flexDirection: 'column', gap: 2, maxHeight: '75vh', position: 'relative', px: 3, pb: 3 }}>
+            <DialogContent
+                sx={{
+                    display: 'flex',
+                    flexDirection: 'column',
+                    gap: 2,
+                    maxHeight: '75vh',
+                    position: 'relative',
+                    px: 3,
+                    pb: 3,
+                    overflow: 'auto'
+                }}
+            >
                 <span style={{ marginTop: '20px' }}>{dialogProps.description}</span>
-                {dialogProps.type === 'STORE' && dialogProps.recordManagerConfig && (
-                    <FormControlLabel
-                        control={<Checkbox checked={removeFromVS} onChange={(event) => setRemoveFromVS(event.target.checked)} />}
-                        label='Remove data from vector store and record manager'
-                    />
-                )}
+                {dialogProps.vectorStoreConfig && !dialogProps.recordManagerConfig && (
+                    <div
+                        style={{
+                            display: 'flex',
+                            flexDirection: 'row',
+                            alignItems: 'center',
+                            borderRadius: 10,
+                            background: 'rgb(254,252,191)',
+                            padding: 10
+                        }}
+                    >
+                        <IconAlertTriangle size={70} color='orange' />
+                        <span style={{ color: 'rgb(116,66,16)', marginLeft: 10 }}>
+                            <strong>Note:</strong> Without a Record Manager configured, only the document chunks will be removed from the
+                            document store. The actual vector embeddings in your vector store database will remain unchanged. To enable
+                            automatic cleanup of vector store data, please configure a Record Manager.{' '}
+                            <Link
+                                href='https://docs.flowiseai.com/integrations/langchain/record-managers'
+                                target='_blank'
+                                rel='noopener noreferrer'
+                                sx={{ fontWeight: 500, color: 'rgb(116,66,16)', textDecoration: 'underline' }}
+                            >
+                                Learn more
+                            </Link>
+                        </span>
+                    </div>
+                )}
-                {removeFromVS && (
-                    <div>
-                        <TableContainer component={Paper}>
-                            <Table sx={{ minWidth: 650 }} aria-label='simple table'>
-                                <TableBody>
-                                    <TableRow sx={{ '& td': { border: 0 } }}>
-                                        <TableCell sx={{ pb: 0, pt: 0 }} colSpan={6}>
-                                            <Box>
-                                                {([...vsFlowData, ...rmFlowData] || []).map((node, index) => {
-                                                    return (
-                                                        <Accordion
-                                                            expanded={nodeConfigExpanded[node.name] || true}
-                                                            onChange={handleAccordionChange(node.name)}
-                                                            key={index}
-                                                            disableGutters
-                                                        >
-                                                            <AccordionSummary
-                                                                expandIcon={<ExpandMoreIcon />}
-                                                                aria-controls={`nodes-accordian-${node.name}`}
-                                                                id={`nodes-accordian-header-${node.name}`}
-                                                            >
-                                                                <div
-                                                                    style={{ display: 'flex', flexDirection: 'row', alignItems: 'center' }}
-                                                                >
-                                                                    <div
-                                                                        style={{
-                                                                            width: 40,
-                                                                            height: 40,
-                                                                            marginRight: 10,
-                                                                            borderRadius: '50%',
-                                                                            backgroundColor: 'white'
-                                                                        }}
-                                                                    >
-                                                                        <img
-                                                                            style={{
-                                                                                width: '100%',
-                                                                                height: '100%',
-                                                                                padding: 7,
-                                                                                objectFit: 'contain'
-                                                                            }}
-                                                                            alt={node.name}
-                                                                            src={`${baseURL}/api/v1/node-icon/${node.name}`}
-                                                                        />
-                                                                    </div>
-                                                                    <Typography variant='h5'>{node.label}</Typography>
-                                                                </div>
-                                                            </AccordionSummary>
-                                                            <AccordionDetails>
-                                                                {node.paramValues[0] && (
-                                                                    <TableViewOnly
-                                                                        sx={{ minWidth: 150 }}
-                                                                        rows={node.paramValues}
-                                                                        columns={Object.keys(node.paramValues[0])}
-                                                                    />
-                                                                )}
-                                                            </AccordionDetails>
-                                                        </Accordion>
-                                                    )
-                                                })}
-                                            </Box>
-                                        </TableCell>
-                                    </TableRow>
-                                </TableBody>
-                            </Table>
-                        </TableContainer>
-                        <span style={{ marginTop: '30px', fontStyle: 'italic', color: '#b35702' }}>
-                            * Only data that were upserted with Record Manager will be deleted from vector store
-                        </span>
-                    </div>
-                )}
+                {vsFlowData && vsFlowData.length > 0 && rmFlowData && rmFlowData.length > 0 && (
+                    <Card sx={{ borderColor: theme.palette.primary[200] + 75, p: 2 }} variant='outlined'>
+                        <Stack sx={{ mt: 1, mb: 2, ml: 1, alignItems: 'center' }} direction='row' spacing={2}>
+                            <SettingsIcon />
+                            <Typography variant='h4'>Configuration</Typography>
+                        </Stack>
+                        <Stack direction='column'>
+                            <TableContainer component={Paper} sx={{ maxHeight: '400px', overflow: 'auto' }}>
+                                <Table sx={{ minWidth: 650 }} aria-label='simple table'>
+                                    <TableBody>
+                                        <TableRow sx={{ '& td': { border: 0 } }}>
+                                            <TableCell sx={{ pb: 0, pt: 0 }} colSpan={6}>
+                                                <Box>
+                                                    {([...vsFlowData, ...rmFlowData] || []).map((node, index) => {
+                                                        return (
+                                                            <Accordion
+                                                                expanded={nodeConfigExpanded[node.name] || false}
+                                                                onChange={handleAccordionChange(node.name)}
+                                                                key={index}
+                                                                disableGutters
+                                                            >
+                                                                <AccordionSummary
+                                                                    expandIcon={<ExpandMoreIcon />}
+                                                                    aria-controls={`nodes-accordian-${node.name}`}
+                                                                    id={`nodes-accordian-header-${node.name}`}
+                                                                >
+                                                                    <div
+                                                                        style={{
+                                                                            display: 'flex',
+                                                                            flexDirection: 'row',
+                                                                            alignItems: 'center'
+                                                                        }}
+                                                                    >
+                                                                        <div
+                                                                            style={{
+                                                                                width: 40,
+                                                                                height: 40,
+                                                                                marginRight: 10,
+                                                                                borderRadius: '50%',
+                                                                                backgroundColor: 'white'
+                                                                            }}
+                                                                        >
+                                                                            <img
+                                                                                style={{
+                                                                                    width: '100%',
+                                                                                    height: '100%',
+                                                                                    padding: 7,
+                                                                                    borderRadius: '50%',
+                                                                                    objectFit: 'contain'
+                                                                                }}
+                                                                                alt={node.name}
+                                                                                src={`${baseURL}/api/v1/node-icon/${node.name}`}
+                                                                            />
+                                                                        </div>
+                                                                        <Typography variant='h5'>{node.label}</Typography>
+                                                                    </div>
+                                                                </AccordionSummary>
+                                                                <AccordionDetails sx={{ p: 0 }}>
+                                                                    {node.paramValues[0] && (
+                                                                        <TableViewOnly
+                                                                            sx={{ minWidth: 150 }}
+                                                                            rows={node.paramValues}
+                                                                            columns={Object.keys(node.paramValues[0])}
+                                                                        />
+                                                                    )}
+                                                                </AccordionDetails>
+                                                            </Accordion>
+                                                        )
+                                                    })}
+                                                </Box>
+                                            </TableCell>
+                                        </TableRow>
+                                    </TableBody>
+                                </Table>
+                            </TableContainer>
+                        </Stack>
+                    </Card>
+                )}
             </DialogContent>
             <DialogActions sx={{ pr: 3, pb: 3 }}>
                 <Button onClick={onCancel} color='primary'>
                     Cancel
                 </Button>
-                <Button variant='contained' onClick={() => onDelete(dialogProps.type, dialogProps.file, removeFromVS)} color='error'>
+                <Button variant='contained' onClick={() => onDelete(dialogProps.type, dialogProps.file)} color='error'>
                     Delete
                 </Button>
             </DialogActions>

@@ -186,19 +186,19 @@ const DocumentStoreDetails = () => {
         setShowDocumentLoaderListDialog(true)
     }

-    const deleteVectorStoreDataFromStore = async (storeId) => {
+    const deleteVectorStoreDataFromStore = async (storeId, docId) => {
         try {
-            await documentsApi.deleteVectorStoreDataFromStore(storeId)
+            await documentsApi.deleteVectorStoreDataFromStore(storeId, docId)
         } catch (error) {
             console.error(error)
         }
     }

-    const onDocStoreDelete = async (type, file, removeFromVectorStore) => {
+    const onDocStoreDelete = async (type, file) => {
         setBackdropLoading(true)
         setShowDeleteDocStoreDialog(false)
         if (type === 'STORE') {
-            if (removeFromVectorStore) {
+            if (documentStore.recordManagerConfig) {
                 await deleteVectorStoreDataFromStore(storeId)
             }
             try {

@@ -239,6 +239,9 @@ const DocumentStoreDetails = () => {
                 })
             }
         } else if (type === 'LOADER') {
+            if (documentStore.recordManagerConfig) {
+                await deleteVectorStoreDataFromStore(storeId, file.id)
+            }
             try {
                 const deleteResp = await documentsApi.deleteLoaderFromStore(storeId, file.id)
                 setBackdropLoading(false)

@@ -280,9 +283,40 @@ const DocumentStoreDetails = () => {
     }

     const onLoaderDelete = (file, vectorStoreConfig, recordManagerConfig) => {
+        // Get the display name in the format "LoaderName (sourceName)"
+        const loaderName = file.loaderName || 'Unknown'
+        let sourceName = ''
+
+        // Prefer files.name when files array exists and has items
+        if (file.files && Array.isArray(file.files) && file.files.length > 0) {
+            sourceName = file.files.map((f) => f.name).join(', ')
+        } else if (file.source) {
+            // Fallback to source logic
+            if (typeof file.source === 'string' && file.source.includes('base64')) {
+                sourceName = getFileName(file.source)
+            } else if (typeof file.source === 'string' && file.source.startsWith('[') && file.source.endsWith(']')) {
+                sourceName = JSON.parse(file.source).join(', ')
+            } else if (typeof file.source === 'string') {
+                sourceName = file.source
+            }
+        }
+
+        const displayName = sourceName ? `${loaderName} (${sourceName})` : loaderName
+
+        let description = `Delete "${displayName}"? This will delete all the associated document chunks from the document store.`
+
+        if (
+            recordManagerConfig &&
+            vectorStoreConfig &&
+            Object.keys(recordManagerConfig).length > 0 &&
+            Object.keys(vectorStoreConfig).length > 0
+        ) {
+            description = `Delete "${displayName}"? This will delete all the associated document chunks from the document store and remove the actual data from the vector store database.`
+        }
+
         const props = {
             title: `Delete`,
-            description: `Delete Loader ${file.loaderName} ? This will delete all the associated document chunks.`,
+            description,
             vectorStoreConfig,
             recordManagerConfig,
             type: 'LOADER',

@@ -294,9 +328,20 @@ const DocumentStoreDetails = () => {
     }

     const onStoreDelete = (vectorStoreConfig, recordManagerConfig) => {
+        let description = `Delete Store ${getSpecificDocumentStore.data?.name}? This will delete all the associated loaders and document chunks from the document store.`
+
+        if (
+            recordManagerConfig &&
+            vectorStoreConfig &&
+            Object.keys(recordManagerConfig).length > 0 &&
+            Object.keys(vectorStoreConfig).length > 0
+        ) {
+            description = `Delete Store ${getSpecificDocumentStore.data?.name}? This will delete all the associated loaders and document chunks from the document store, and remove the actual data from the vector store database.`
+        }
+
         const props = {
             title: `Delete`,
-            description: `Delete Store ${getSpecificDocumentStore.data?.name} ? This will delete all the associated loaders and document chunks.`,
+            description,
             vectorStoreConfig,
             recordManagerConfig,
             type: 'STORE'

@@ -481,7 +526,10 @@ const DocumentStoreDetails = () => {
                         >
                             <MenuItem
                                 disabled={documentStore?.totalChunks <= 0 || documentStore?.status === 'UPSERTING'}
-                                onClick={() => showStoredChunks('all')}
+                                onClick={() => {
+                                    handleClose()
+                                    showStoredChunks('all')
+                                }}
                                 disableRipple
                             >
                                 <FileChunksIcon />

@@ -490,7 +538,10 @@ const DocumentStoreDetails = () => {
                             <Available permission={'documentStores:upsert-config'}>
                                 <MenuItem
                                     disabled={documentStore?.totalChunks <= 0 || documentStore?.status === 'UPSERTING'}
-                                    onClick={() => showVectorStore(documentStore.id)}
+                                    onClick={() => {
+                                        handleClose()
+                                        showVectorStore(documentStore.id)
+                                    }}
                                     disableRipple
                                 >
                                     <NoteAddIcon />

@@ -499,7 +550,10 @@ const DocumentStoreDetails = () => {
                             </Available>
                             <MenuItem
                                 disabled={documentStore?.totalChunks <= 0 || documentStore?.status !== 'UPSERTED'}
-                                onClick={() => showVectorStoreQuery(documentStore.id)}
+                                onClick={() => {
+                                    handleClose()
+                                    showVectorStoreQuery(documentStore.id)
+                                }}
                                 disableRipple
                             >
                                 <SearchIcon />

@@ -518,7 +572,10 @@ const DocumentStoreDetails = () => {
                             </Available>
                             <Divider sx={{ my: 0.5 }} />
                             <MenuItem
-                                onClick={() => onStoreDelete(documentStore.vectorStoreConfig, documentStore.recordManagerConfig)}
+                                onClick={() => {
+                                    handleClose()
+                                    onStoreDelete(documentStore.vectorStoreConfig, documentStore.recordManagerConfig)
+                                }}
                                 disableRipple
                             >
                                 <FileDeleteIcon />

@@ -756,20 +813,26 @@ function LoaderRow(props) {
         setAnchorEl(null)
     }

-    const formatSources = (files, source) => {
+    const formatSources = (files, source, loaderName) => {
+        let sourceName = ''
+
+        // Prefer files.name when files array exists and has items
         if (files && Array.isArray(files) && files.length > 0) {
-            return files.map((file) => file.name).join(', ')
+            sourceName = files.map((file) => file.name).join(', ')
+        } else if (source && typeof source === 'string' && source.includes('base64')) {
+            // Fallback to original source logic
+            sourceName = getFileName(source)
+        } else if (source && typeof source === 'string' && source.startsWith('[') && source.endsWith(']')) {
+            sourceName = JSON.parse(source).join(', ')
+        } else if (source) {
+            sourceName = source
         }

-        // Fallback to original source logic
-        if (source && typeof source === 'string' && source.includes('base64')) {
-            return getFileName(source)
+        // Return format: "LoaderName (sourceName)" or just "LoaderName" if no source
+        if (!sourceName) {
+            return loaderName || 'No source'
         }
-        if (source && typeof source === 'string' && source.startsWith('[') && source.endsWith(']')) {
-            return JSON.parse(source).join(', ')
-        }
-        return source || 'No source'
+        return loaderName ? `${loaderName} (${sourceName})` : sourceName
     }

     return (

@@ -823,32 +886,62 @@ function LoaderRow(props) {
                 onClose={handleClose}
             >
                 <Available permission={'documentStores:preview-process'}>
-                    <MenuItem onClick={props.onEditClick} disableRipple>
+                    <MenuItem
+                        onClick={() => {
+                            handleClose()
+                            props.onEditClick()
+                        }}
+                        disableRipple
+                    >
                         <FileEditIcon />
                         Preview & Process
                     </MenuItem>
                 </Available>
                 <Available permission={'documentStores:preview-process'}>
-                    <MenuItem onClick={props.onViewChunksClick} disableRipple>
+                    <MenuItem
+                        onClick={() => {
+                            handleClose()
+                            props.onViewChunksClick()
+                        }}
+                        disableRipple
+                    >
                         <FileChunksIcon />
                         View & Edit Chunks
                     </MenuItem>
                 </Available>
                 <Available permission={'documentStores:preview-process'}>
-                    <MenuItem onClick={props.onChunkUpsert} disableRipple>
+                    <MenuItem
+                        onClick={() => {
+                            handleClose()
+                            props.onChunkUpsert()
+                        }}
+                        disableRipple
+                    >
                         <NoteAddIcon />
                         Upsert Chunks
                     </MenuItem>
                 </Available>
                 <Available permission={'documentStores:preview-process'}>
-                    <MenuItem onClick={props.onViewUpsertAPI} disableRipple>
+                    <MenuItem
+                        onClick={() => {
+                            handleClose()
+                            props.onViewUpsertAPI()
+                        }}
+                        disableRipple
+                    >
                         <CodeIcon />
                         View API
                     </MenuItem>
                 </Available>
                 <Divider sx={{ my: 0.5 }} />
                 <Available permission={'documentStores:delete-loader'}>
-                    <MenuItem onClick={props.onDeleteClick} disableRipple>
+                    <MenuItem
+                        onClick={() => {
+                            handleClose()
+                            props.onDeleteClick()
+                        }}
+                        disableRipple
+                    >
                         <FileDeleteIcon />
                         Delete
                     </MenuItem>

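Illustrative inputs and outputs for the reworked formatSources (the loader and file names here are invented for the example):

// Hypothetical calls showing the "LoaderName (sourceName)" format:
formatSources([{ name: 'guide.pdf' }, { name: 'faq.txt' }], null, 'Pdf File')
// -> 'Pdf File (guide.pdf, faq.txt)'
formatSources(null, '["a.txt","b.txt"]', 'Json Lines File')
// -> 'Json Lines File (a.txt, b.txt)'
formatSources(null, null, 'Web Scraper')
// -> 'Web Scraper' (no source available)
formatSources(null, null, null)
// -> 'No source'
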
@@ -26,6 +26,7 @@ import useApi from '@/hooks/useApi'
 import useConfirm from '@/hooks/useConfirm'
 import useNotifier from '@/utils/useNotifier'
 import { useAuth } from '@/hooks/useAuth'
+import { getFileName } from '@/utils/genericHelper'

 // store
 import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions'

@@ -76,6 +77,7 @@ const ShowStoredChunks = () => {
     const [showExpandedChunkDialog, setShowExpandedChunkDialog] = useState(false)
     const [expandedChunkDialogProps, setExpandedChunkDialogProps] = useState({})
     const [fileNames, setFileNames] = useState([])
+    const [loaderDisplayName, setLoaderDisplayName] = useState('')

     const chunkSelected = (chunkId) => {
         const selectedChunk = documentChunks.find((chunk) => chunk.id === chunkId)

@@ -212,13 +214,32 @@ const ShowStoredChunks = () => {
             setCurrentPage(data.currentPage)
             setStart(data.currentPage * 50 - 49)
             setEnd(data.currentPage * 50 > data.count ? data.count : data.currentPage * 50)
+
+            // Build the loader display name in format "LoaderName (sourceName)"
+            const loaderName = data.file?.loaderName || data.storeName || ''
+            let sourceName = ''
+
             if (data.file?.files && data.file.files.length > 0) {
                 const fileNames = []
                 for (const attachedFile of data.file.files) {
                     fileNames.push(attachedFile.name)
                 }
                 setFileNames(fileNames)
+                sourceName = fileNames.join(', ')
+            } else if (data.file?.source) {
+                const source = data.file.source
+                if (typeof source === 'string' && source.includes('base64')) {
+                    sourceName = getFileName(source)
+                } else if (typeof source === 'string' && source.startsWith('[') && source.endsWith(']')) {
+                    sourceName = JSON.parse(source).join(', ')
+                } else if (typeof source === 'string') {
+                    sourceName = source
+                }
             }
+
+            // Set display name in format "LoaderName (sourceName)" or just "LoaderName"
+            const displayName = sourceName ? `${loaderName} (${sourceName})` : loaderName
+            setLoaderDisplayName(displayName)
         }

         // eslint-disable-next-line react-hooks/exhaustive-deps

@@ -234,7 +255,7 @@ const ShowStoredChunks = () => {
                     <ViewHeader
                         isBackButton={true}
                         search={false}
-                        title={getChunksApi.data?.file?.loaderName || getChunksApi.data?.storeName}
+                        title={loaderDisplayName}
                         description={getChunksApi.data?.file?.splitterName || getChunksApi.data?.description}
                         onBack={() => navigate(-1)}
                     ></ViewHeader>

@@ -40,7 +40,7 @@ import Storage from '@mui/icons-material/Storage'
 import DynamicFeed from '@mui/icons-material/Filter1'

 // utils
-import { initNode, showHideInputParams } from '@/utils/genericHelper'
+import { initNode, showHideInputParams, getFileName } from '@/utils/genericHelper'
 import useNotifier from '@/utils/useNotifier'

 // const

@@ -69,6 +69,7 @@ const VectorStoreConfigure = () => {
     const [loading, setLoading] = useState(true)
     const [documentStore, setDocumentStore] = useState({})
     const [dialogProps, setDialogProps] = useState({})
+    const [currentLoader, setCurrentLoader] = useState(null)

     const [showEmbeddingsListDialog, setShowEmbeddingsListDialog] = useState(false)
     const [selectedEmbeddingsProvider, setSelectedEmbeddingsProvider] = useState({})

@@ -245,7 +246,8 @@ const VectorStoreConfigure = () => {
     const prepareConfigData = () => {
         const data = {
             storeId: storeId,
-            docId: docId
+            docId: docId,
+            isStrictSave: true
         }
         // Set embedding config
         if (selectedEmbeddingsProvider.inputs) {

@@ -353,6 +355,39 @@ const VectorStoreConfigure = () => {
         return Object.keys(selectedEmbeddingsProvider).length === 0
     }

+    const getLoaderDisplayName = (loader) => {
+        if (!loader) return ''
+
+        const loaderName = loader.loaderName || 'Unknown'
+        let sourceName = ''
+
+        // Prefer files.name when files array exists and has items
+        if (loader.files && Array.isArray(loader.files) && loader.files.length > 0) {
+            sourceName = loader.files.map((file) => file.name).join(', ')
+        } else if (loader.source) {
+            // Fallback to source logic
+            if (typeof loader.source === 'string' && loader.source.includes('base64')) {
+                sourceName = getFileName(loader.source)
+            } else if (typeof loader.source === 'string' && loader.source.startsWith('[') && loader.source.endsWith(']')) {
+                sourceName = JSON.parse(loader.source).join(', ')
+            } else if (typeof loader.source === 'string') {
+                sourceName = loader.source
+            }
+        }
+
+        // Return format: "LoaderName (sourceName)" or just "LoaderName" if no source
+        return sourceName ? `${loaderName} (${sourceName})` : loaderName
+    }
+
+    const getViewHeaderTitle = () => {
+        const storeName = getSpecificDocumentStoreApi.data?.name || ''
+        if (docId && currentLoader) {
+            const loaderName = getLoaderDisplayName(currentLoader)
+            return `${storeName} / ${loaderName}`
+        }
+        return storeName
+    }
+
     useEffect(() => {
         if (saveVectorStoreConfigApi.data) {
             setLoading(false)

@@ -411,6 +446,15 @@ const VectorStoreConfigure = () => {
             return
         }
         setDocumentStore(docStore)
+
+        // Find the current loader if docId is provided
+        if (docId && docStore.loaders) {
+            const loader = docStore.loaders.find((l) => l.id === docId)
+            if (loader) {
+                setCurrentLoader(loader)
+            }
+        }
+
         if (docStore.embeddingConfig) {
             getEmbeddingNodeDetailsApi.request(docStore.embeddingConfig.name)
         }

@@ -473,7 +517,7 @@ const VectorStoreConfigure = () => {
                     <ViewHeader
                         isBackButton={true}
                         search={false}
-                        title={getSpecificDocumentStoreApi.data?.name}
+                        title={getViewHeaderTitle()}
                         description='Configure Embeddings, Vector Store and Record Manager'
                         onBack={() => navigate(-1)}
                     >

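Illustrative header titles the new getViewHeaderTitle produces (the store and loader names are invented):

// Store-level view (no docId):    "Product Docs"
// Loader-level view (docId set):  "Product Docs / Pdf File (manual.pdf)"
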
Some files were not shown because too many files have changed in this diff.